[ 465.588343] env[61998]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61998) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 465.588707] env[61998]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61998) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 465.588812] env[61998]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61998) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 465.589110] env[61998]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 465.687267] env[61998]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61998) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 465.697338] env[61998]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61998) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 466.303299] env[61998]: INFO nova.virt.driver [None req-de4f6ae8-5fdb-49fb-8a75-5df2059ce774 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 466.376162] env[61998]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 466.376323] env[61998]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 466.376421] env[61998]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61998) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 469.480515] env[61998]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-725e56be-384a-44a6-bc3f-86bdc199da89 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.497228] env[61998]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61998) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 469.497430] env[61998]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-b735ccfb-296d-43ef-bec8-0910b2dc3398 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.522200] env[61998]: INFO oslo_vmware.api [-] Successfully established new session; session ID is f202d.
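The records above show nova-compute probing iscsiadm through oslo.concurrency's processutils and then opening a vCenter session through oslo.vmware while holding "oslo_vmware_api_lock". Below is a minimal sketch of driving oslo.vmware directly; the endpoint and credentials are placeholders, and the positional argument order follows my reading of the VMwareAPISession constructor, so treat it as an assumption rather than a verbatim excerpt of the driver:

from oslo_vmware import api

# Placeholder endpoint and credentials (assumptions); in the deployment
# logged above these come from the [vmware] section of nova.conf.
session = api.VMwareAPISession(
    'vc1.example.test',   # vCenter hostname
    'administrator',      # server_username
    'secret',             # server_password
    10,                   # api_retry_count
    0.5,                  # task_poll_interval, in seconds
    port=443,
    scheme='https')

# Property reads like this one are what produce the
# "Invoking ServiceInstance.RetrieveServiceContent ..." records above.
print(session.vim.service_content.about.version)  # e.g. "7.0.3"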
[ 469.522384] env[61998]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.146s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 469.522872] env[61998]: INFO nova.virt.vmwareapi.driver [None req-de4f6ae8-5fdb-49fb-8a75-5df2059ce774 None None] VMware vCenter version: 7.0.3
[ 469.526338] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e584e921-42f7-4969-83ef-fad2e6c23520 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.543260] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf93aae-7d92-4a78-bed4-35896830f6fd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.548848] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598cd993-a350-4f07-a8ce-514cbbf653ba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.555265] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e345d429-be60-4e22-9c4f-9a39c691e77e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.568050] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4917dd9b-82cb-407c-afe6-e8593814e632 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.573750] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c261a05-e42d-4f0f-a5d5-5aa1ad4ea93b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.604039] env[61998]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-3b92cb36-2d8b-442a-890e-761b049e7808 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 469.608844] env[61998]: DEBUG nova.virt.vmwareapi.driver [None req-de4f6ae8-5fdb-49fb-8a75-5df2059ce774 None None] Extension org.openstack.compute already exists. {{(pid=61998) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 469.611470] env[61998]: INFO nova.compute.provider_config [None req-de4f6ae8-5fdb-49fb-8a75-5df2059ce774 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
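Every "Acquiring lock" / "acquired" / "released :: held 3.146s" triplet in this log is emitted by oslo.concurrency's lockutils wrapper around the guarded callable. A small sketch of the same pattern, reusing the lock name from the log (the function body is a stand-in):

from oslo_concurrency import lockutils

@lockutils.synchronized('oslo_vmware_api_lock')
def create_session():
    # Only one thread at a time may (re)build the vCenter session;
    # lockutils logs the Acquiring/acquired/released lines seen above.
    pass

# Equivalent context-manager form:
with lockutils.lock('oslo_vmware_api_lock'):
    pass  # critical section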
[ 470.115013] env[61998]: DEBUG nova.context [None req-de4f6ae8-5fdb-49fb-8a75-5df2059ce774 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),cc503533-2f83-435d-b5b6-cd32a3d4b210(cell1) {{(pid=61998) load_cells /opt/stack/nova/nova/context.py:464}}
[ 470.117168] env[61998]: DEBUG oslo_concurrency.lockutils [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 470.117403] env[61998]: DEBUG oslo_concurrency.lockutils [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 470.118100] env[61998]: DEBUG oslo_concurrency.lockutils [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 470.118549] env[61998]: DEBUG oslo_concurrency.lockutils [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Acquiring lock "cc503533-2f83-435d-b5b6-cd32a3d4b210" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 470.118737] env[61998]: DEBUG oslo_concurrency.lockutils [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Lock "cc503533-2f83-435d-b5b6-cd32a3d4b210" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 470.119783] env[61998]: DEBUG oslo_concurrency.lockutils [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Lock "cc503533-2f83-435d-b5b6-cd32a3d4b210" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 470.139633] env[61998]: INFO dbcounter [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Registered counter for database nova_cell0
[ 470.147951] env[61998]: INFO dbcounter [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Registered counter for database nova_cell1
[ 470.151251] env[61998]: DEBUG oslo_db.sqlalchemy.engines [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61998) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 470.151609] env[61998]: DEBUG oslo_db.sqlalchemy.engines [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61998) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 470.156402] env[61998]: ERROR nova.db.main.api [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 470.156402] env[61998]: result = function(*args, **kwargs)
[ 470.156402] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 470.156402] env[61998]: return func(*args, **kwargs)
[ 470.156402] env[61998]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 470.156402] env[61998]: result = fn(*args, **kwargs)
[ 470.156402] env[61998]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 470.156402] env[61998]: return f(*args, **kwargs)
[ 470.156402] env[61998]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 470.156402] env[61998]: return db.service_get_minimum_version(context, binaries)
[ 470.156402] env[61998]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 470.156402] env[61998]: _check_db_access()
[ 470.156402] env[61998]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 470.156402] env[61998]: stacktrace = ''.join(traceback.format_stack())
[ 470.156402] env[61998]:
[ 470.157101] env[61998]: ERROR nova.db.main.api [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 470.157101] env[61998]: result = function(*args, **kwargs)
[ 470.157101] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 470.157101] env[61998]: return func(*args, **kwargs)
[ 470.157101] env[61998]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 470.157101] env[61998]: result = fn(*args, **kwargs)
[ 470.157101] env[61998]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 470.157101] env[61998]: return f(*args, **kwargs)
[ 470.157101] env[61998]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 470.157101] env[61998]: return db.service_get_minimum_version(context, binaries)
[ 470.157101] env[61998]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 470.157101] env[61998]: _check_db_access()
[ 470.157101] env[61998]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 470.157101] env[61998]: stacktrace = ''.join(traceback.format_stack())
[ 470.157101] env[61998]:
[ 470.157490] env[61998]: WARNING nova.objects.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Failed to get minimum service version for cell cc503533-2f83-435d-b5b6-cd32a3d4b210
[ 470.157621] env[61998]: WARNING nova.objects.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
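The duplicated ERROR blocks above are nova's guard against direct database access from nova-compute: the compute service must go through the conductor, so the attempted service_get_minimum_version call only logs the offending stack and the caller degrades to the "Failed to get minimum service version" warnings, once per cell. A simplified, hypothetical reconstruction of such a guard; the names DISALLOW_DB_ACCESS and blocked are mine, not Nova's actual identifiers:

import logging
import traceback

LOG = logging.getLogger(__name__)

# Nova flips an equivalent flag when the compute service starts;
# this module-level boolean is an assumption for illustration only.
DISALLOW_DB_ACCESS = True

def blocked(fn):
    """Refuse DB access in compute processes, logging the call stack."""
    def wrapper(*args, **kwargs):
        if DISALLOW_DB_ACCESS:
            stacktrace = ''.join(traceback.format_stack())
            LOG.error('No DB access allowed in nova-compute: %s', stacktrace)
            raise RuntimeError('DB access from nova-compute is forbidden')
        return fn(*args, **kwargs)
    return wrapper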
[ 470.158048] env[61998]: DEBUG oslo_concurrency.lockutils [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Acquiring lock "singleton_lock" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 470.158218] env[61998]: DEBUG oslo_concurrency.lockutils [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Acquired lock "singleton_lock" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 470.158470] env[61998]: DEBUG oslo_concurrency.lockutils [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Releasing lock "singleton_lock" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 470.158782] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Full set of CONF: {{(pid=61998) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 470.158927] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ******************************************************************************** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 470.159078] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Configuration options gathered from: {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 470.159223] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}}
[ 470.159415] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 470.159542] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ================================================================================ {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}}
[ 470.159748] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] allow_resize_to_same_host = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.159917] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] arq_binding_timeout = 300 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.160061] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] backdoor_port = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.160189] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] backdoor_socket = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.160353] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] block_device_allocate_retries = 60 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.160513] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] block_device_allocate_retries_interval = 3 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.160680] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cert = self.pem {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.160842] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.161017] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute_monitors = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.161191] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] config_dir = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.161360] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] config_drive_format = iso9660 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.161492] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.161657] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] config_source = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.161826] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] console_host = devstack {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.161992] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] control_exchange = nova {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.162182] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cpu_allocation_ratio = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.162347] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] daemon = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.162515] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] debug = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.162670] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] default_access_ip_network_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.162834] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] default_availability_zone = nova {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.162991] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] default_ephemeral_format = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.163167] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] default_green_pool_size = 1000 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.163398] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.163558] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] default_schedule_zone = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.163720] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] disk_allocation_ratio = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.163873] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] enable_new_services = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.164056] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] enabled_apis = ['osapi_compute'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.164225] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] enabled_ssl_apis = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.164385] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] flat_injected = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.164542] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] force_config_drive = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.164700] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] force_raw_images = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.164871] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] graceful_shutdown_timeout = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.165041] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] heal_instance_info_cache_interval = 60 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.165260] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] host = cpu-1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.165450] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.165600] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.165762] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.165975] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.166154] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] instance_build_timeout = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.166317] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] instance_delete_interval = 300 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.166487] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] instance_format = [instance: %(uuid)s] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.166654] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] instance_name_template = instance-%08x {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.166817] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] instance_usage_audit = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.166989] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] instance_usage_audit_period = month {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.167182] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.167348] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.167510] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] internal_service_availability_zone = internal {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.167666] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] key = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.167826] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] live_migration_retry_count = 30 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.167994] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] log_color = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.168175] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] log_config_append = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.168342] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.168502] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] log_dir = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.168661] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] log_file = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.168792] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] log_options = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.168953] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] log_rotate_interval = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.169132] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] log_rotate_interval_type = days {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.169298] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] log_rotation_type = none {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.169427] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.169553] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.169718] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.169882] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.170018] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.170183] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] long_rpc_timeout = 1800 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.170343] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] max_concurrent_builds = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.170499] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] max_concurrent_live_migrations = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.170654] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] max_concurrent_snapshots = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.170809] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] max_local_block_devices = 3 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.170965] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] max_logfile_count = 30 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.171151] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] max_logfile_size_mb = 200 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.171315] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] maximum_instance_delete_attempts = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.171481] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] metadata_listen = 0.0.0.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.171647] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] metadata_listen_port = 8775 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.171816] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] metadata_workers = 2 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.171977] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] migrate_max_retries = -1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.172160] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] mkisofs_cmd = genisoimage {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.172365] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.172498] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] my_ip = 10.180.1.21 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.172698] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.172862] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] network_allocate_retries = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.173057] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.173229] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.173392] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] osapi_compute_listen_port = 8774 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.173556] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] osapi_compute_unique_server_name_scope = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.173721] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] osapi_compute_workers = 2 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.173885] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] password_length = 12 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.174063] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] periodic_enable = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.174218] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] periodic_fuzzy_delay = 60 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.174382] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] pointer_model = usbtablet {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.174544] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] preallocate_images = none {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.174704] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] publish_errors = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.174838] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] pybasedir = /opt/stack/nova {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.174993] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ram_allocation_ratio = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.175166] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] rate_limit_burst = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.175330] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] rate_limit_except_level = CRITICAL {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.175489] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] rate_limit_interval = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.175648] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] reboot_timeout = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.175805] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] reclaim_instance_interval = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.175962] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] record = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.176158] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] reimage_timeout_per_gb = 60 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.176328] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] report_interval = 120 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.176486] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] rescue_timeout = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.176643] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] reserved_host_cpus = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.176798] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] reserved_host_disk_mb = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.176954] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] reserved_host_memory_mb = 512 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.177127] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] reserved_huge_pages = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.177288] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] resize_confirm_window = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.177447] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] resize_fs_using_block_device = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.177604] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] resume_guests_state_on_host_boot = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.177770] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.177932] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] rpc_response_timeout = 60 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.178104] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] run_external_periodic_tasks = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.178277] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] running_deleted_instance_action = reap {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.178436] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.178595] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] running_deleted_instance_timeout = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.178750] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] scheduler_instance_sync_interval = 120 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.178918] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] service_down_time = 720 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.179098] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] servicegroup_driver = db {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.179255] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] shell_completion = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.179414] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] shelved_offload_time = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.179569] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] shelved_poll_interval = 3600 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.179735] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] shutdown_timeout = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.179896] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] source_is_ipv6 = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.180067] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ssl_only = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.180309] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.180478] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] sync_power_state_interval = 600 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.180640] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] sync_power_state_pool_size = 1000 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.180806] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] syslog_log_facility = LOG_USER {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.180966] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] tempdir = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.181154] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] timeout_nbd = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.181326] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] transport_url = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.181487] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] update_resources_interval = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.181647] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] use_cow_images = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.181806] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] use_eventlog = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.181965] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] use_journal = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.182139] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] use_json = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.182298] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] use_rootwrap_daemon = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.182454] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] use_stderr = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.182612] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] use_syslog = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.182770] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vcpu_pin_set = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.182938] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plugging_is_fatal = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.183127] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plugging_timeout = 300 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.183287] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] virt_mkfs = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.183447] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] volume_usage_poll_interval = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.183606] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] watch_log_file = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.183796] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] web = /usr/share/spice-html5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 470.183969] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.184170] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.184311] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.184482] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_concurrency.disable_process_locking = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.184764] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.184945] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.185136] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.185311] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.185478] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.185640] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.185819] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.auth_strategy = keystone {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.185989] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.compute_link_prefix = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.186182] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.186359] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.dhcp_domain = novalocal {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.186528] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.enable_instance_password = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.186691] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.glance_link_prefix = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.186858] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.187039] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.187208] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.instance_list_per_project_cells = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.187369] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.list_records_by_skipping_down_cells = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.187532] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.local_metadata_per_cell = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.187699] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.max_limit = 1000 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.187868] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.metadata_cache_expiration = 15 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.188053] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.neutron_default_tenant_id = default {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.188232] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.response_validation = warn {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.188398] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.use_neutron_default_nets = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.188566] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.188728] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.188895] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.189090] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.189269] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.vendordata_dynamic_targets = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.189433] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.vendordata_jsonfile_path = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.189614] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.189805] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.backend = dogpile.cache.memcached {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.189976] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.backend_argument = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.190161] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.config_prefix = cache.oslo {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.190332] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.dead_timeout = 60.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.190496] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.debug_cache_backend = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.190658] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.enable_retry_client = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.190819] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.enable_socket_keepalive = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.190992] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.enabled = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.191173] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.enforce_fips_mode = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.191338] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.expiration_time = 600 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.191500] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.hashclient_retry_attempts = 2 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.191664] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.191826] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.memcache_dead_retry = 300 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.191983] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.memcache_password = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.192157] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.192321] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.192482] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.memcache_pool_maxsize = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.192643] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.192805] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.memcache_sasl_enabled = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.192983] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.193163] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.193324] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.memcache_username = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 470.193491] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.proxies = [] {{(pid=61998) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.193653] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.redis_db = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.193827] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.redis_password = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.194016] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.194182] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.194351] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.redis_server = localhost:6379 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.194513] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.redis_socket_timeout = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.194672] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.redis_username = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.194833] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.retry_attempts = 2 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.194997] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.retry_delay = 0.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.195178] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.socket_keepalive_count = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.195330] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.socket_keepalive_idle = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.195486] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.socket_keepalive_interval = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.195642] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.tls_allowed_ciphers = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.195800] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.tls_cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.195956] 
env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.tls_certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.196132] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.tls_enabled = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.196337] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cache.tls_keyfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.196458] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.auth_section = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.196630] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.auth_type = password {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.196787] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.196963] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.197137] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.197303] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.197465] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.cross_az_attach = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.197626] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.debug = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.197785] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.endpoint_template = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.197950] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.http_retries = 3 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.198165] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.insecure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.198332] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.keyfile = None {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.198502] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.os_region_name = RegionOne {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.198665] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.198824] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cinder.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.198998] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.199173] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute.cpu_dedicated_set = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.199334] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute.cpu_shared_set = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.199496] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute.image_type_exclude_list = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.199659] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.199821] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.199986] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.200158] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.200329] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.200491] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute.resource_provider_association_refresh = 300 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.200651] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.200811] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute.shutdown_retry_interval = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.200991] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.201184] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] conductor.workers = 2 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.201361] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] console.allowed_origins = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.201520] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] console.ssl_ciphers = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.201691] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] console.ssl_minimum_version = default {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.201860] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] consoleauth.enforce_session_timeout = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.202040] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] consoleauth.token_ttl = 600 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.202212] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.202369] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.202528] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.202686] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.connect_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.202843] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.connect_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.203014] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.endpoint_override = None 
{{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.203198] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.insecure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.203358] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.keyfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.203516] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.max_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.203672] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.min_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.203831] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.region_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.203989] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.retriable_status_codes = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.204161] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.service_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.204329] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.service_type = accelerator {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.204491] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.204647] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.status_code_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.204804] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.status_code_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.204963] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.205157] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.205318] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] cyborg.version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
470.205496] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.backend = sqlalchemy {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.205665] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.connection = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.205831] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.connection_debug = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.206013] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.connection_parameters = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.206181] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.connection_recycle_time = 3600 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.206344] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.connection_trace = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.206505] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.db_inc_retry_interval = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.206666] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.db_max_retries = 20 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.206829] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.db_max_retry_interval = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.206991] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.db_retry_interval = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.207165] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.max_overflow = 50 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.207324] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.max_pool_size = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.207483] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.max_retries = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.207650] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.207809] env[61998]: DEBUG oslo_service.service [None 
req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.mysql_wsrep_sync_wait = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.207968] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.pool_timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.208157] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.retry_interval = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.208320] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.slave_connection = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.208478] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.sqlite_synchronous = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.208639] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] database.use_db_reconnect = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.208816] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.backend = sqlalchemy {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.208984] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.connection = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.209164] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.connection_debug = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.209334] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.connection_parameters = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.209496] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.connection_recycle_time = 3600 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.209657] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.connection_trace = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.209817] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.db_inc_retry_interval = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.209980] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.db_max_retries = 20 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.210156] env[61998]: DEBUG oslo_service.service [None 
req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.db_max_retry_interval = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.210319] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.db_retry_interval = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.210479] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.max_overflow = 50 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.210639] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.max_pool_size = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.210801] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.max_retries = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.210972] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.211147] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.211310] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.pool_timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.211471] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.retry_interval = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.211627] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.slave_connection = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.211787] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] api_database.sqlite_synchronous = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.211964] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] devices.enabled_mdev_types = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.212155] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.212326] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.212487] env[61998]: DEBUG oslo_service.service [None 
req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ephemeral_storage_encryption.enabled = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.212651] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.212823] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.api_servers = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.212988] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.213179] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.213347] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.213507] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.connect_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.213667] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.connect_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.213860] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.debug = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.214062] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.default_trusted_certificate_ids = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.214231] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.enable_certificate_validation = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.214394] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.enable_rbd_download = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.214553] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.endpoint_override = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.214717] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.insecure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.214894] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.keyfile = None 
{{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.215086] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.max_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.215254] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.min_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.215418] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.num_retries = 3 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.215585] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.rbd_ceph_conf = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.215747] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.rbd_connect_timeout = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.215915] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.rbd_pool = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.216096] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.rbd_user = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.216259] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.region_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.216418] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.retriable_status_codes = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.216573] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.service_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.216739] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.service_type = image {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.216907] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.217092] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.status_code_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.217263] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.status_code_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.217419] env[61998]: DEBUG 
oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.217598] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.217764] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.verify_glance_signatures = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.217925] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] glance.version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.218105] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] guestfs.debug = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.218277] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.auth_section = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.218441] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.auth_type = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.218597] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.218754] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.218917] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.219089] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.connect_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.219252] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.connect_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.219411] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.endpoint_override = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.219574] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.insecure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.219732] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.keyfile = None {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.219889] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.max_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.220057] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.min_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.220218] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.region_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.220377] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.retriable_status_codes = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.220534] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.service_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.220700] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.service_type = shared-file-system {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.220864] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.share_apply_policy_timeout = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.221037] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.221200] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.status_code_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.221355] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.status_code_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.221510] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.221691] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.221847] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] manila.version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.222038] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] mks.enabled = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.222452] 
env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.222656] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] image_cache.manager_interval = 2400 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.222830] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] image_cache.precache_concurrency = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.223014] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] image_cache.remove_unused_base_images = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.223196] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.223366] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.223544] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] image_cache.subdirectory_name = _base {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.223721] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.api_max_retries = 60 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.223911] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.api_retry_interval = 2 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.224090] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.auth_section = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.224261] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.auth_type = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.224421] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.224580] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.224739] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.224921] env[61998]: DEBUG 
oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.conductor_group = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.225108] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.connect_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.225275] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.connect_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.225432] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.endpoint_override = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.225595] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.insecure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.225754] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.keyfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.225912] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.max_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.226083] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.min_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.226259] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.peer_list = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.226414] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.region_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.226570] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.retriable_status_codes = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.226732] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.serial_console_state_timeout = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.226891] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.service_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.227085] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.service_type = baremetal {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.227253] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.shard = None {{(pid=61998) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.227418] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.227578] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.status_code_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.227735] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.status_code_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.227892] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.228086] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.228251] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ironic.version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.228435] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.228607] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] key_manager.fixed_key = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.228789] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.228951] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.barbican_api_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.229123] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.barbican_endpoint = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.229295] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.barbican_endpoint_type = public {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.229451] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.barbican_region_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.229608] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.cafile = None {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.229765] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.229929] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.230099] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.insecure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.230261] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.keyfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.230422] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.number_of_retries = 60 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.230580] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.retry_delay = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.230743] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.send_service_user_token = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.230904] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.231085] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.231257] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.verify_ssl = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.231418] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican.verify_ssl_path = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.231585] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican_service_user.auth_section = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.231750] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican_service_user.auth_type = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.231907] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican_service_user.cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.232077] 
env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican_service_user.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.232248] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican_service_user.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.232410] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican_service_user.insecure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.232567] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican_service_user.keyfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.232728] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican_service_user.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.232887] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] barbican_service_user.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.233065] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vault.approle_role_id = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.233227] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vault.approle_secret_id = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.233398] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vault.kv_mountpoint = secret {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.233556] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vault.kv_path = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.233717] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vault.kv_version = 2 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.233904] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vault.namespace = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.234078] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vault.root_token_id = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.234240] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vault.ssl_ca_crt_file = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.234452] env[61998]: DEBUG oslo_service.service [None 
req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vault.timeout = 60.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.234568] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vault.use_ssl = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.234774] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.234932] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.auth_section = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.235131] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.auth_type = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.235298] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.235457] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.235620] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.235797] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.connect_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.235937] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.connect_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.236109] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.endpoint_override = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.236276] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.insecure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.236433] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.keyfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.236591] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.max_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.236748] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.min_version = None {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.236906] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.region_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.237078] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.retriable_status_codes = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.237241] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.service_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.237411] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.service_type = identity {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.237575] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.237736] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.status_code_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.237898] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.status_code_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.238065] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.238249] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.238409] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] keystone.version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.238610] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.connection_uri = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.238774] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.cpu_mode = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.238941] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.239136] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.cpu_models = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
470.239312] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.cpu_power_governor_high = performance {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.239481] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.239642] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.cpu_power_management = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.239812] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.239981] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.device_detach_attempts = 8 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.240158] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.device_detach_timeout = 20 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.240324] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.disk_cachemodes = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.240483] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.disk_prefix = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.240651] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.enabled_perf_events = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.240818] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.file_backed_memory = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.240987] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.gid_maps = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.241160] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.hw_disk_discard = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.241319] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.hw_machine_type = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.241484] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.images_rbd_ceph_conf = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.241661] env[61998]: DEBUG 
oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.241823] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.241990] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.images_rbd_glance_store_name = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.242175] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.images_rbd_pool = rbd {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.242347] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.images_type = default {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.242507] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.images_volume_group = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.242669] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.inject_key = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.242831] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.inject_partition = -2 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.242994] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.inject_password = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.243170] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.iscsi_iface = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.243332] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.iser_use_multipath = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.243494] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.243655] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.243839] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_downtime = 500 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.244037] env[61998]: DEBUG oslo_service.service [None 
req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.244214] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.244379] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_inbound_addr = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.244548] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.244701] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.244902] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_scheme = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.245115] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_timeout_action = abort {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.245286] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_tunnelled = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.245446] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_uri = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.245608] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.live_migration_with_native_tls = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.245767] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.max_queues = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.246011] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.246262] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.246429] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.nfs_mount_options = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.246716] 
env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.246888] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.247066] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.247232] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.247396] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.247557] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.num_pcie_ports = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.247723] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.247888] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.pmem_namespaces = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.248061] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.quobyte_client_cfg = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.248346] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.248519] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.248685] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.248850] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.249025] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.rbd_secret_uuid = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
470.249201] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.rbd_user = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.249368] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.249538] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.249698] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.rescue_image_id = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.249860] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.rescue_kernel_id = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.250029] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.rescue_ramdisk_id = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.250205] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.250367] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.rx_queue_size = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.250536] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.smbfs_mount_options = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.250809] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.251027] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.snapshot_compression = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.251205] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.snapshot_image_format = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.251428] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.251596] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.sparse_logical_volumes = False {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.251760] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.swtpm_enabled = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.251932] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.swtpm_group = tss {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.252118] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.swtpm_user = tss {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.252289] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.sysinfo_serial = unique {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.252450] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.tb_cache_size = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.252608] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.tx_queue_size = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.252772] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.uid_maps = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.252936] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.use_virtio_for_bridges = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.253134] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.virt_type = kvm {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.253309] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.volume_clear = zero {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.253475] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.volume_clear_size = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.253641] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.volume_use_multipath = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.253824] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.vzstorage_cache_path = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.254020] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.254237] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.254421] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.254646] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.254875] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.255084] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.vzstorage_mount_user = stack {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.255261] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.255437] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.auth_section = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.255613] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.auth_type = password {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.255779] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.255938] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.256114] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.256276] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.connect_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.256435] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.connect_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.256605] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.default_floating_pool = public {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.256766] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.endpoint_override = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.256931] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.extension_sync_interval = 600 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.257104] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.http_retries = 3 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.257270] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.insecure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.257430] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.keyfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.257588] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.max_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.257756] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.257914] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.min_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.258109] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.ovs_bridge = br-int {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.258285] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.physnets = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.258456] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.region_name = RegionOne {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.258616] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.retriable_status_codes = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.258787] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.service_metadata_proxy = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.258950] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.service_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.259132] env[61998]: 
DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.service_type = network {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.259297] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.259455] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.status_code_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.259612] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.status_code_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.259771] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.259952] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.260130] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] neutron.version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.260304] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] notifications.bdms_in_notifications = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.260482] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] notifications.default_level = INFO {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.260659] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] notifications.notification_format = unversioned {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.260820] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] notifications.notify_on_state_change = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.260998] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.261186] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] pci.alias = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.261357] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] pci.device_spec = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.261521] env[61998]: DEBUG 
oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] pci.report_in_placement = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.261690] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.auth_section = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.261863] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.auth_type = password {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.262052] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.262223] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.262382] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.262545] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.262705] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.connect_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.262870] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.connect_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.263085] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.default_domain_id = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.263256] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.default_domain_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.263417] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.domain_id = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.263574] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.domain_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.263732] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.endpoint_override = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.263922] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None 
None] placement.insecure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.264101] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.keyfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.264266] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.max_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.264426] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.min_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.264595] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.password = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.264752] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.project_domain_id = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.264937] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.project_domain_name = Default {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.265132] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.project_id = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.265309] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.project_name = service {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.265478] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.region_name = RegionOne {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.265641] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.retriable_status_codes = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.265800] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.service_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.265972] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.service_type = placement {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.266174] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.266307] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.status_code_retries = None {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.266465] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.status_code_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.266622] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.system_scope = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.266778] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.266935] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.trust_id = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.267119] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.user_domain_id = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.267295] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.user_domain_name = Default {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.267492] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.user_id = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.267711] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.username = nova {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.267969] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.268182] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] placement.version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.268369] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.cores = 20 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.268536] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.count_usage_from_placement = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.268709] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.268885] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.injected_file_content_bytes = 10240 {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.269066] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.injected_file_path_length = 255 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.269243] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.injected_files = 5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.269410] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.instances = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.269576] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.key_pairs = 100 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.269742] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.metadata_items = 128 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.269906] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.ram = 51200 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.270083] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.recheck_quota = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.270255] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.server_group_members = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.270421] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] quota.server_groups = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.270591] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.270753] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.270918] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] scheduler.image_metadata_prefilter = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.271090] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.271258] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] scheduler.max_attempts = 3 {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.271419] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] scheduler.max_placement_results = 1000 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.271579] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.271742] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.271898] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.272097] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] scheduler.workers = 2 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.272278] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.272451] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.272631] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.272800] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.272967] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.273146] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.273309] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.273497] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.273667] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.host_subset_size = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.273862] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.274052] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.274225] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.274393] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.isolated_hosts = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.274557] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.isolated_images = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.274720] env[61998]: DEBUG oslo_service.service [None 
req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.274921] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.275116] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.275283] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.pci_in_placement = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.275448] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.275612] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.275775] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.275949] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.276125] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.276294] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.276447] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.track_instance_changes = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.276623] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.276793] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] metrics.required = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.276959] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] metrics.weight_multiplier = 1.0 
{{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.277150] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.277320] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] metrics.weight_setting = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.277640] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.277816] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] serial_console.enabled = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.277996] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] serial_console.port_range = 10000:20000 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.278183] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.278352] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.278519] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] serial_console.serialproxy_port = 6083 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.278686] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] service_user.auth_section = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.278858] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] service_user.auth_type = password {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.279027] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] service_user.cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.279190] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] service_user.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.279351] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] service_user.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.279510] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] service_user.insecure = False {{(pid=61998) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.279666] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] service_user.keyfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.279837] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] service_user.send_service_user_token = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.279999] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] service_user.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.280170] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] service_user.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.280338] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.agent_enabled = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.280497] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.enabled = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.280802] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.280998] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.281182] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.html5proxy_port = 6082 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.281345] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.image_compression = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.281505] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.jpeg_compression = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.281666] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.playback_compression = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.281849] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.require_secure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.282055] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.server_listen = 127.0.0.1 {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.282240] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.282401] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.streaming_mode = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.282561] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] spice.zlib_compression = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.282726] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] upgrade_levels.baseapi = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.282895] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] upgrade_levels.compute = auto {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.283070] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] upgrade_levels.conductor = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.283232] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] upgrade_levels.scheduler = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.283398] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.283559] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.283719] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vendordata_dynamic_auth.cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.283905] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vendordata_dynamic_auth.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.284092] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.284259] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vendordata_dynamic_auth.insecure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.284417] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.284580] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.284736] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vendordata_dynamic_auth.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.284955] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.api_retry_count = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.285120] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.ca_file = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.285322] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.285459] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.cluster_name = testcl1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.285620] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.connection_pool_size = 10 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.285777] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.console_delay_seconds = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.285944] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.datastore_regex = ^datastore.* {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.286185] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.286406] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.host_password = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.286528] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.host_port = 443 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.286695] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.host_username = administrator@vsphere.local {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.286889] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.insecure = True {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.287082] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.integration_bridge = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.287255] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.maximum_objects = 100 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.287414] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.pbm_default_policy = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.287576] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.pbm_enabled = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.287735] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.pbm_wsdl_location = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.287900] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.288071] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.serial_port_proxy_uri = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.288229] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.serial_port_service_uri = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.288392] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.task_poll_interval = 0.5 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.288562] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.use_linked_clone = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.288728] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.vnc_keymap = en-us {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.288891] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.vnc_port = 5900 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.289065] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vmware.vnc_port_total = 10000 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.289255] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vnc.auth_schemes = ['none'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.289427] 
env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vnc.enabled = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.289726] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.289909] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.290095] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vnc.novncproxy_port = 6080 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.290276] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vnc.server_listen = 127.0.0.1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.290450] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.290611] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vnc.vencrypt_ca_certs = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.290770] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vnc.vencrypt_client_cert = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.290929] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vnc.vencrypt_client_key = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.291136] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.291307] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.disable_deep_image_inspection = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.291470] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.291632] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.291816] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.292012] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.disable_rootwrap = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.292181] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.enable_numa_live_migration = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.292344] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.292505] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.292664] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.292824] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.libvirt_disable_apic = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.292986] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.293163] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.293325] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.293487] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.293645] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.293825] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.294010] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.294182] 
env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.294342] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.294506] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.294687] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.294881] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] wsgi.client_socket_timeout = 900 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.295084] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] wsgi.default_pool_size = 1000 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.295260] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] wsgi.keep_alive = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.295429] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] wsgi.max_header_line = 16384 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.295588] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.295748] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] wsgi.ssl_ca_file = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.295908] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] wsgi.ssl_cert_file = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.296082] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] wsgi.ssl_key_file = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.296253] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] wsgi.tcp_keepidle = 600 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.296434] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.296601] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] zvm.ca_file = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.296759] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] zvm.cloud_connector_url = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.297453] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.297647] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] zvm.reachable_timeout = 300 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.297837] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_policy.enforce_new_defaults = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.298236] env[61998]: WARNING oslo_config.cfg [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
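[editor's example] Every DEBUG record above carries the tag "log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826": these lines are emitted by oslo.config's ConfigOpts.log_opt_values(), which nova calls once at service startup to dump every registered option group. A minimal, self-contained sketch of that API follows; the option names, defaults, and the 'demo' project name are illustrative assumptions, not nova's real option definitions.

    import logging

    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    # Illustrative options only -- not nova's actual option set.
    CONF = cfg.CONF
    CONF.register_opts(
        [
            cfg.StrOpt('cluster_name', default='testcl1'),
            # Options declared secret=True are masked as '****' in the dump,
            # matching "vmware.host_password = ****" in the records above.
            cfg.StrOpt('host_password', secret=True),
        ],
        group='vmware',
    )

    CONF(args=[], project='demo')
    # Walks every registered group and option and logs "group.option = value"
    # at the given level -- the same call that produced the dump in this log.
    CONF.log_opt_values(LOG, logging.DEBUG)

Running this prints one "vmware.cluster_name = testcl1" style line per option, in the same format seen throughout this section.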
[ 470.298427] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_policy.enforce_scope = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.298605] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_policy.policy_default_rule = default {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.298791] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.298999] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_policy.policy_file = policy.yaml {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.299200] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.299367] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.299529] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.299689] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.299855] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.300050] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.300240] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.300418] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler.connection_string = messaging:// {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.300585] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler.enabled = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.300752] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler.es_doc_type = notification 
{{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.300917] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler.es_scroll_size = 10000 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.301100] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler.es_scroll_time = 2m {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.301266] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler.filter_error_trace = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.301432] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler.hmac_keys = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.301598] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler.sentinel_service_name = mymaster {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.301772] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler.socket_timeout = 0.1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.301958] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler.trace_requests = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.302136] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler.trace_sqlalchemy = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.302319] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler_jaeger.process_tags = {} {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.302479] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler_jaeger.service_name_prefix = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.302640] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] profiler_otlp.service_name_prefix = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.302809] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] remote_debug.host = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.302969] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] remote_debug.port = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.303158] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.303322] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.303484] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.303646] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.303831] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.304016] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.304205] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.304372] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.304536] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.304706] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.304905] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.305110] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.305281] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.305450] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.305621] 
env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.305790] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.305987] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.306217] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.306390] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.306591] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.306784] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.307034] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.307247] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.307437] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.307603] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.307771] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.307938] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.308128] env[61998]: DEBUG oslo_service.service [None 
req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.308305] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.308474] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.ssl = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.308650] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.308823] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.309102] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.309347] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.309553] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.309745] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.309975] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.310183] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_notifications.retry = -1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.310377] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.310555] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.310754] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.auth_section = None {{(pid=61998) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.310951] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.auth_type = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.311161] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.cafile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.311354] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.certfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.311533] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.collect_timing = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.311696] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.connect_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.311906] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.connect_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.312172] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.endpoint_id = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.312354] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.endpoint_override = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.312524] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.insecure = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.312686] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.keyfile = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.312847] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.max_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.313022] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.min_version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.313182] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.region_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.313348] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.retriable_status_codes = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.313506] 
env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.service_name = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.313662] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.service_type = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.313847] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.split_loggers = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.314039] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.status_code_retries = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.314215] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.status_code_retry_delay = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.314373] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.timeout = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.314532] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.valid_interfaces = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.314688] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_limit.version = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.314879] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_reports.file_event_handler = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.315074] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.315245] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] oslo_reports.log_dir = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.315416] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.315573] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.315733] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.315899] 
env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.316119] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.316333] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.316542] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.316719] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plug_ovs_privileged.group = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.316886] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.317065] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.317237] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.317398] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] vif_plug_ovs_privileged.user = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.317572] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.317754] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.317931] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.318120] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.318296] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.318462] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.318628] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.318792] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.318973] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.319161] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_ovs.isolate_vif = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.319333] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.319500] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.319671] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.319837] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.320009] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] os_vif_ovs.per_port_bridge = False {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.320190] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] privsep_osbrick.capabilities = [21] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.320348] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] privsep_osbrick.group = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.320506] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] privsep_osbrick.helper_command = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.320670] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.320832] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.320989] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] privsep_osbrick.user = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.321241] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.321382] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] nova_sys_admin.group = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.321546] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] nova_sys_admin.helper_command = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.321712] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.322164] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.322164] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] nova_sys_admin.user = None {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 470.322254] env[61998]: DEBUG oslo_service.service [None req-592d2440-56b3-4e25-b835-5b528253eb22 None None] ******************************************************************************** {{(pid=61998) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 470.322695] env[61998]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 470.825911] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Getting list of instances from cluster (obj){ [ 470.825911] env[61998]: value = "domain-c8" [ 470.825911] env[61998]: _type = "ClusterComputeResource" [ 470.825911] env[61998]: } {{(pid=61998) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 470.827151] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c765bef-79e4-4366-8060-e387f078b080 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.836331] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Got total of 0 instances {{(pid=61998) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 470.836864] env[61998]: WARNING nova.virt.vmwareapi.driver [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 470.837350] env[61998]: INFO nova.virt.node [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Generated node identity c8c34fc8-902a-460e-a93a-a1e887f55ddd [ 470.837584] env[61998]: INFO nova.virt.node [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Wrote node identity c8c34fc8-902a-460e-a93a-a1e887f55ddd to /opt/stack/data/n-cpu-1/compute_id [ 471.340615] env[61998]: WARNING nova.compute.manager [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Compute nodes ['c8c34fc8-902a-460e-a93a-a1e887f55ddd'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 472.345748] env[61998]: INFO nova.compute.manager [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 473.353675] env[61998]: WARNING nova.compute.manager [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 473.354099] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 473.354214] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 473.354369] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 473.354522] env[61998]: DEBUG nova.compute.resource_tracker [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61998) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 473.355609] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5091d4-a1af-4115-a741-f4c6efefac48 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.363995] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdd56b1-646f-4255-b0b0-d925bfb4febb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.378783] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f54423f3-1b86-4ee9-8832-6251121066fa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.385265] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b48fdd8-cccb-47c8-81ad-b1d1e379117c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.414177] env[61998]: DEBUG nova.compute.resource_tracker [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181369MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61998) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 473.414359] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 473.414508] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 473.916914] env[61998]: WARNING nova.compute.resource_tracker [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] No compute node record for cpu-1:c8c34fc8-902a-460e-a93a-a1e887f55ddd: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host c8c34fc8-902a-460e-a93a-a1e887f55ddd could not be found. [ 474.420401] env[61998]: INFO nova.compute.resource_tracker [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: c8c34fc8-902a-460e-a93a-a1e887f55ddd [ 475.928531] env[61998]: DEBUG nova.compute.resource_tracker [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 475.929040] env[61998]: DEBUG nova.compute.resource_tracker [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 476.084292] env[61998]: INFO nova.scheduler.client.report [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] [req-c2a0aebe-606e-4f7e-ab03-3b00dfe94013] Created resource provider record via placement API for resource provider with UUID c8c34fc8-902a-460e-a93a-a1e887f55ddd and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
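The resource tracker has now created the compute node record for cpu-1 and registered a matching resource provider in Placement; the inventory payloads logged just below follow Placement's inventory schema. A minimal sketch of how those fields combine into schedulable capacity, assuming Placement's documented semantics (capacity is (total - reserved) * allocation_ratio, with each individual allocation bounded by min_unit, max_unit and step_size); the helper below is hypothetical, not Nova code:

    # Hypothetical helper; field names match the inventory dicts logged below.
    def effective_capacity(inv: dict) -> int:
        """Schedulable units for one resource class."""
        return int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1,
                      "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                    "max_unit": 175, "step_size": 1, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        # VCPU: (48 - 0) * 4.0 = 192 schedulable units, though no single
        # allocation may take more than max_unit (16) at once.
        print(rc, effective_capacity(inv), "max per allocation:",
              inv["max_unit"])
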
[ 476.099675] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6495ab14-8c08-4d3d-80a3-ceb24162e2c9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 476.107775] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48f7712-97f5-4de1-aa84-1ca881328fde {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 476.137794] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec8c245-946c-4bcf-a0a1-cc2515763306 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 476.145449] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef5960f-6675-4f4c-9a84-5fdf35ca54b6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 476.158253] env[61998]: DEBUG nova.compute.provider_tree [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Updating inventory in ProviderTree for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 476.699723] env[61998]: DEBUG nova.scheduler.client.report [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Updated inventory for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 476.700100] env[61998]: DEBUG nova.compute.provider_tree [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Updating resource provider c8c34fc8-902a-460e-a93a-a1e887f55ddd generation from 0 to 1 during operation: update_inventory {{(pid=61998) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 476.700366] env[61998]: DEBUG nova.compute.provider_tree [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Updating inventory in ProviderTree for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 476.753066] env[61998]: DEBUG nova.compute.provider_tree [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Updating 
resource provider c8c34fc8-902a-460e-a93a-a1e887f55ddd generation from 1 to 2 during operation: update_traits {{(pid=61998) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 477.258129] env[61998]: DEBUG nova.compute.resource_tracker [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61998) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 477.258499] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.844s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 477.258499] env[61998]: DEBUG nova.service [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Creating RPC server for service compute {{(pid=61998) start /opt/stack/nova/nova/service.py:186}} [ 477.272197] env[61998]: DEBUG nova.service [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] Join ServiceGroup membership for this service compute {{(pid=61998) start /opt/stack/nova/nova/service.py:203}} [ 477.272409] env[61998]: DEBUG nova.servicegroup.drivers.db [None req-eb3490a8-2023-45e4-a968-cfad1cf8f382 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61998) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 516.394931] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "9a13d0a1-849c-428f-ab63-29d2bd4cd60c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.395394] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "9a13d0a1-849c-428f-ab63-29d2bd4cd60c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.899287] env[61998]: DEBUG nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 517.440807] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.441336] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.443778] env[61998]: INFO nova.compute.claims [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 517.962857] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Acquiring lock "2aabbd53-4c4d-4b53-8135-34cc5a17fd47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.963166] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Lock "2aabbd53-4c4d-4b53-8135-34cc5a17fd47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.110928] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquiring lock "2a24c5ce-9b52-49ae-b847-6280cef02eb5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.111745] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Lock "2a24c5ce-9b52-49ae-b847-6280cef02eb5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.466795] env[61998]: DEBUG nova.compute.manager [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 518.555629] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56f4e00-c748-4a66-a74d-b30551d8bca6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.566379] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebaf323-76ee-43e2-b291-6fb91c5ba554 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.607864] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7503bc86-db52-4947-b548-82c41c7d5861 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.614268] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd877c82-a706-4683-89da-8ab6f3b50aed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.618425] env[61998]: DEBUG nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 518.631335] env[61998]: DEBUG nova.compute.provider_tree [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 519.007814] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.136070] env[61998]: DEBUG nova.scheduler.client.report [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 519.151221] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.642668] env[61998]: DEBUG oslo_concurrency.lockutils [None 
req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.200s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 519.642668] env[61998]: DEBUG nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 519.646085] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.636s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.646700] env[61998]: INFO nova.compute.claims [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 520.154244] env[61998]: DEBUG nova.compute.utils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 520.157709] env[61998]: DEBUG nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 520.158134] env[61998]: DEBUG nova.network.neutron [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 520.309480] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Acquiring lock "5a590ae6-eb88-433d-81b4-33e7b6ace868" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.309747] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Lock "5a590ae6-eb88-433d-81b4-33e7b6ace868" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 520.594756] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquiring lock "4cafaebd-b577-4a7e-92b8-151445f66a0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.595514] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Lock "4cafaebd-b577-4a7e-92b8-151445f66a0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 520.665160] env[61998]: DEBUG nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 520.770755] env[61998]: DEBUG nova.policy [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '45e5c7148ac343ee8674cf6747d7df0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '102883704d52434591e74440e02262fb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 520.789857] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e655615-543b-445c-b8c5-9550e5018007 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.799800] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfac82a5-fc1e-4f83-8c08-b66f84400f8c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.841070] env[61998]: DEBUG nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 520.847607] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a84f3e-faab-45c7-b764-6b6f54073984 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.854500] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c7e0eb-d539-4ad9-b38f-00ed45d84a6a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.876714] env[61998]: DEBUG nova.compute.provider_tree [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 521.098394] env[61998]: DEBUG nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 521.266958] env[61998]: DEBUG nova.network.neutron [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Successfully created port: 7288c976-3fee-483a-959d-9c6bd8501c86 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 521.375285] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.380481] env[61998]: DEBUG nova.scheduler.client.report [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 521.622456] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.683325] env[61998]: DEBUG nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 521.721906] env[61998]: DEBUG nova.virt.hardware [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 521.722492] env[61998]: DEBUG nova.virt.hardware [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 521.723462] env[61998]: DEBUG nova.virt.hardware [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 521.723462] env[61998]: DEBUG nova.virt.hardware [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 521.723590] env[61998]: DEBUG nova.virt.hardware [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 521.725726] env[61998]: DEBUG nova.virt.hardware [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 521.725957] env[61998]: DEBUG nova.virt.hardware [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 521.726100] env[61998]: DEBUG nova.virt.hardware [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 521.726600] env[61998]: DEBUG nova.virt.hardware [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] 
Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 521.726816] env[61998]: DEBUG nova.virt.hardware [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 521.726995] env[61998]: DEBUG nova.virt.hardware [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 521.728128] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96e637f-0c2e-4030-beb2-4612cbf85511 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.742733] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf03344-511d-4841-bfe1-70f4a2b01b1f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.763937] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0b181f-b179-4523-b01a-8777e56aaeb2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.888705] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.245s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 521.889831] env[61998]: DEBUG nova.compute.manager [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 521.892151] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.741s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.893564] env[61998]: INFO nova.compute.claims [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 522.397974] env[61998]: DEBUG nova.compute.utils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 522.401625] env[61998]: DEBUG nova.compute.manager [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Not allocating networking since 'none' was specified. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 522.902917] env[61998]: DEBUG nova.compute.manager [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 523.004437] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cd32ed-e8b2-4657-a203-ad0af14d0894 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.012543] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1e197d-d289-4557-bce5-1284083a5eab {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.044864] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053dcc9a-f6d8-4861-ae73-b95829999723 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.053030] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6776e6b0-20b4-42f7-91fd-c0cf8ec05ca9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.066683] env[61998]: DEBUG nova.compute.provider_tree [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 523.566144] env[61998]: ERROR nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7288c976-3fee-483a-959d-9c6bd8501c86, please check neutron logs for more information. 
[ 523.566144] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 523.566144] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 523.566144] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 523.566144] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 523.566144] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 523.566144] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 523.566144] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 523.566144] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.566144] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 523.566144] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.566144] env[61998]: ERROR nova.compute.manager raise self.value [ 523.566144] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 523.566144] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 523.566144] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.566144] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 523.566651] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.566651] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 523.566651] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7288c976-3fee-483a-959d-9c6bd8501c86, please check neutron logs for more information. 
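The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py line 294 in this deployment): after asking Neutron to bind the port, Nova inspects the returned port's binding:vif_type and raises PortBindingFailed when Neutron reports a failed binding. A hedged sketch of that check, assuming Neutron's convention of setting binding:vif_type to "binding_failed" when no mechanism driver could bind the port; the port dict is a stub and the exception class is simplified:

    class PortBindingFailed(Exception):
        # Simplified stand-in for nova.exception.PortBindingFailed.
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, please "
                             f"check neutron logs for more information.")

    def ensure_no_port_binding_failure(port: dict) -> None:
        # Neutron marks ports it could not bind with this vif_type.
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port["id"])

    # The failed port from the log above:
    port = {"id": "7288c976-3fee-483a-959d-9c6bd8501c86",
            "binding:vif_type": "binding_failed"}
    ensure_no_port_binding_failure(port)  # raises PortBindingFailed
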
[ 523.566651] env[61998]: ERROR nova.compute.manager [ 523.566651] env[61998]: Traceback (most recent call last): [ 523.566651] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 523.566651] env[61998]: listener.cb(fileno) [ 523.566651] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.566651] env[61998]: result = function(*args, **kwargs) [ 523.566651] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 523.566651] env[61998]: return func(*args, **kwargs) [ 523.566651] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 523.566651] env[61998]: raise e [ 523.566651] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 523.566651] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 523.566651] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 523.566651] env[61998]: created_port_ids = self._update_ports_for_instance( [ 523.566651] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 523.566651] env[61998]: with excutils.save_and_reraise_exception(): [ 523.566651] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.566651] env[61998]: self.force_reraise() [ 523.566651] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.566651] env[61998]: raise self.value [ 523.566651] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 523.566651] env[61998]: updated_port = self._update_port( [ 523.566651] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.566651] env[61998]: _ensure_no_port_binding_failure(port) [ 523.566651] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.566651] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 523.567403] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 7288c976-3fee-483a-959d-9c6bd8501c86, please check neutron logs for more information. [ 523.567403] env[61998]: Removing descriptor: 15 [ 523.568112] env[61998]: ERROR nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7288c976-3fee-483a-959d-9c6bd8501c86, please check neutron logs for more information. 
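Several frames in these tracebacks pass through oslo_utils.excutils.save_and_reraise_exception, which is how _update_ports_for_instance rolls back ports it already created without swallowing the original error; the traceback for the spawn failure continues below. A minimal usage sketch, with create_port and delete_port as hypothetical stand-ins for the Neutron calls:

    from oslo_utils import excutils

    def create_port(req):
        # Hypothetical stand-in: fails the way the port update in the
        # log did.
        raise RuntimeError(f"binding failed for {req}")

    def delete_port(port_id):
        print(f"rolled back port {port_id}")

    def update_ports_for_instance(requested):
        created = []
        try:
            for req in requested:
                created.append(create_port(req))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs with the original exception saved; it is
                # re-raised automatically when this block exits.
                for port_id in created:
                    delete_port(port_id)
        return created
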
[ 523.568112] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Traceback (most recent call last): [ 523.568112] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 523.568112] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] yield resources [ 523.568112] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 523.568112] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] self.driver.spawn(context, instance, image_meta, [ 523.568112] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 523.568112] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 523.568112] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 523.568112] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] vm_ref = self.build_virtual_machine(instance, [ 523.568112] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 523.568580] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] vif_infos = vmwarevif.get_vif_info(self._session, [ 523.568580] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 523.568580] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] for vif in network_info: [ 523.568580] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 523.568580] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] return self._sync_wrapper(fn, *args, **kwargs) [ 523.568580] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 523.568580] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] self.wait() [ 523.568580] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 523.568580] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] self[:] = self._gt.wait() [ 523.568580] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 523.568580] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] return self._exit_event.wait() [ 523.568580] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 523.568580] env[61998]: ERROR 
nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] result = hub.switch() [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] return self.greenlet.switch() [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] result = function(*args, **kwargs) [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] return func(*args, **kwargs) [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] raise e [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] nwinfo = self.network_api.allocate_for_instance( [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] created_port_ids = self._update_ports_for_instance( [ 523.569954] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 523.570593] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] with excutils.save_and_reraise_exception(): [ 523.570593] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.570593] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] self.force_reraise() [ 523.570593] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.570593] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] raise self.value [ 523.570593] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 523.570593] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] updated_port = self._update_port( [ 523.570593] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.570593] 
env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] _ensure_no_port_binding_failure(port) [ 523.570593] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.570593] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] raise exception.PortBindingFailed(port_id=port['id']) [ 523.570593] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] nova.exception.PortBindingFailed: Binding failed for port 7288c976-3fee-483a-959d-9c6bd8501c86, please check neutron logs for more information. [ 523.570593] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] [ 523.571629] env[61998]: INFO nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Terminating instance [ 523.572115] env[61998]: DEBUG nova.scheduler.client.report [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 523.578327] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "refresh_cache-9a13d0a1-849c-428f-ab63-29d2bd4cd60c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.578327] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquired lock "refresh_cache-9a13d0a1-849c-428f-ab63-29d2bd4cd60c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 523.578327] env[61998]: DEBUG nova.network.neutron [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 523.921751] env[61998]: DEBUG nova.compute.manager [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 523.949111] env[61998]: DEBUG nova.virt.hardware [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 523.949509] env[61998]: DEBUG nova.virt.hardware [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 523.949577] env[61998]: DEBUG nova.virt.hardware [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 523.949858] env[61998]: DEBUG nova.virt.hardware [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 523.949858] env[61998]: DEBUG nova.virt.hardware [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 523.949981] env[61998]: DEBUG nova.virt.hardware [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 523.951413] env[61998]: DEBUG nova.virt.hardware [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 523.951588] env[61998]: DEBUG nova.virt.hardware [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
523.951793] env[61998]: DEBUG nova.virt.hardware [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 523.951907] env[61998]: DEBUG nova.virt.hardware [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 523.952091] env[61998]: DEBUG nova.virt.hardware [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 523.952979] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1ac6fd-d75b-4f56-a423-a02abf5fe77a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.962690] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356422d8-6c04-44fe-82cf-4a88f12841c3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.978435] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Instance VIF info [] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 523.989342] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 523.989673] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02d81648-6e0d-4521-9212-09787ae8c50a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.000938] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Created folder: OpenStack in parent group-v4. [ 524.001145] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Creating folder: Project (caf7fdcebe8440af8fbc145f465fd8ed). Parent ref: group-v294665. 
{{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 524.001372] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff9a1275-58a4-4298-8b0d-2d79ba7873ff {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.011534] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Created folder: Project (caf7fdcebe8440af8fbc145f465fd8ed) in parent group-v294665. [ 524.013234] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Creating folder: Instances. Parent ref: group-v294666. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 524.013234] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb2e1d38-326d-455e-a28f-00ebe7a76d7e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.021931] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Created folder: Instances in parent group-v294666. [ 524.022168] env[61998]: DEBUG oslo.service.loopingcall [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 524.022362] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 524.023030] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b97bc62-2fc8-4e34-981a-a5d99e5a7531 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.044025] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 524.044025] env[61998]: value = "task-1388355" [ 524.044025] env[61998]: _type = "Task" [ 524.044025] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 524.052456] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388355, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 524.081758] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.189s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 524.083159] env[61998]: DEBUG nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 524.085804] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.710s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.090737] env[61998]: INFO nova.compute.claims [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 524.139721] env[61998]: DEBUG nova.network.neutron [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 524.388283] env[61998]: DEBUG nova.network.neutron [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 524.555704] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388355, 'name': CreateVM_Task, 'duration_secs': 0.302375} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 524.555889] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 524.557087] env[61998]: DEBUG oslo_vmware.service [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f1124b-6f11-4f5d-bd90-34f4cf554add {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.567357] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 524.571719] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.571719] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 524.571719] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4582498-1533-4a4f-8e8b-6542b88ac88b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.576050] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){ [ 524.576050] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52dfe98d-b38c-d0e9-c4da-6ee0a3f5e7ed" [ 524.576050] env[61998]: _type = "Task" [ 524.576050] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 524.585884] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52dfe98d-b38c-d0e9-c4da-6ee0a3f5e7ed, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 524.595307] env[61998]: DEBUG nova.compute.utils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 524.596706] env[61998]: DEBUG nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 524.596889] env[61998]: DEBUG nova.network.neutron [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 524.724937] env[61998]: DEBUG nova.policy [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98e87e38a03745df8fa2fc3f91fffd64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d65c62abaf84b00b01f9a6b3d5df366', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 524.776185] env[61998]: DEBUG nova.compute.manager [req-f4132a71-deb0-42e0-9df4-e71baa01f6d4 req-594ca1df-fcc1-421d-ac7d-0e3573659c9c service nova] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Received event network-changed-7288c976-3fee-483a-959d-9c6bd8501c86 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 524.779472] env[61998]: DEBUG nova.compute.manager [req-f4132a71-deb0-42e0-9df4-e71baa01f6d4 req-594ca1df-fcc1-421d-ac7d-0e3573659c9c service nova] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Refreshing instance network info cache due to event network-changed-7288c976-3fee-483a-959d-9c6bd8501c86. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 524.780569] env[61998]: DEBUG oslo_concurrency.lockutils [req-f4132a71-deb0-42e0-9df4-e71baa01f6d4 req-594ca1df-fcc1-421d-ac7d-0e3573659c9c service nova] Acquiring lock "refresh_cache-9a13d0a1-849c-428f-ab63-29d2bd4cd60c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 524.894250] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Releasing lock "refresh_cache-9a13d0a1-849c-428f-ab63-29d2bd4cd60c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 524.894250] env[61998]: DEBUG nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 524.894250] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 524.894544] env[61998]: DEBUG oslo_concurrency.lockutils [req-f4132a71-deb0-42e0-9df4-e71baa01f6d4 req-594ca1df-fcc1-421d-ac7d-0e3573659c9c service nova] Acquired lock "refresh_cache-9a13d0a1-849c-428f-ab63-29d2bd4cd60c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.895043] env[61998]: DEBUG nova.network.neutron [req-f4132a71-deb0-42e0-9df4-e71baa01f6d4 req-594ca1df-fcc1-421d-ac7d-0e3573659c9c service nova] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Refreshing network info cache for port 7288c976-3fee-483a-959d-9c6bd8501c86 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 524.895991] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-907a662b-b833-42db-8c9f-5864408bfb2c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.907402] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f0f970-ac73-4f37-ac5e-b12bb3e5b79b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.930598] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9a13d0a1-849c-428f-ab63-29d2bd4cd60c could not be found. 
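The SearchIndex.FindAllByUuid call above is how the driver tries to locate the backing VM by instance UUID before destroying it; here the search returns nothing, which produces the InstanceNotFound warning instead of a hard failure. A rough sketch of such a lookup through an oslo.vmware session (session is assumed to be an already-established VMwareAPISession; the helper name is illustrative):

    # Sketch only; assumes an authenticated oslo_vmware.api.VMwareAPISession.
    def find_vm_by_instance_uuid(session, instance_uuid):
        vim = session.vim
        # instanceUuid=True matches the nova instance UUID stored in the
        # VM's config rather than the vSphere BIOS UUID.
        vm_refs = session.invoke_api(vim, 'FindAllByUuid',
                                     vim.service_content.searchIndex,
                                     uuid=instance_uuid,
                                     vmSearch=True,
                                     instanceUuid=True)
        return vm_refs[0] if vm_refs else None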
[ 524.930902] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 524.931270] env[61998]: INFO nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 524.932116] env[61998]: DEBUG oslo.service.loopingcall [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 524.932394] env[61998]: DEBUG nova.compute.manager [-] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 524.932496] env[61998]: DEBUG nova.network.neutron [-] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 524.960049] env[61998]: DEBUG nova.network.neutron [-] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 525.089096] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 525.089276] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 525.089513] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 525.089655] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 525.090318] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c
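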
tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 525.090936] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d4bb854-2e6b-4503-a361-c90f18d392d9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.100775] env[61998]: DEBUG nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 525.112069] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 525.112439] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 525.114333] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58f3202-5849-46a9-ad73-56d9b5bb2f1b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.121321] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-594993f9-5b17-494c-92fb-9eb659027d1f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.129374] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){ [ 525.129374] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]523b4aa2-2116-b810-54ba-d91b1e12a3c3" [ 525.129374] env[61998]: _type = "Task" [ 525.129374] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 525.139126] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523b4aa2-2116-b810-54ba-d91b1e12a3c3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 525.223613] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9cf85c-eafa-47b8-ac93-b323a70d3d4d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.230981] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd65190-08cd-4eba-9a16-7a21254d5eaa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.274966] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642bc3c2-3253-4cbc-bbeb-6a95f9122e10 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.282802] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a889b002-eefb-4679-ab76-78fd27bd81fd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.298589] env[61998]: DEBUG nova.compute.provider_tree [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 525.451696] env[61998]: DEBUG nova.network.neutron [req-f4132a71-deb0-42e0-9df4-e71baa01f6d4 req-594ca1df-fcc1-421d-ac7d-0e3573659c9c service nova] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 525.464451] env[61998]: DEBUG nova.network.neutron [-] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.643427] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Preparing fetch location {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 525.643779] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Creating directory with path [datastore2] vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 525.644512] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-316aeabb-b4bf-4d49-b272-a03612eac088 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.667047] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Created directory with path [datastore2] vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 525.667047] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Fetch image to [datastore2] vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 525.667047] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Downloading image file data a90c4a31-8bcc-48cf-ada7-7369ab14c460 to [datastore2] vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk on the data store datastore2 {{(pid=61998) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 525.667349] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c496e1-3442-4f2a-8552-0c213bb06182 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.678479] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f887c7ce-d321-4936-b0e2-7749839d07ec {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.688953] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c21677-8a71-477c-a125-0b313e45ef64 
{{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.724592] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f64b3d4-1889-4d48-a7d7-c00daca2b142 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.731400] env[61998]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c0221ad5-49b5-48d9-ba5b-9012ebf3e925 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.751920] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Downloading image file data a90c4a31-8bcc-48cf-ada7-7369ab14c460 to the data store datastore2 {{(pid=61998) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 525.796317] env[61998]: DEBUG nova.network.neutron [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Successfully created port: 80bd11f6-156e-47d4-b4a6-925e658bfa06 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 525.803839] env[61998]: DEBUG nova.scheduler.client.report [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 525.841498] env[61998]: DEBUG oslo_vmware.rw_handles [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61998) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 525.959834] env[61998]: DEBUG nova.network.neutron [req-f4132a71-deb0-42e0-9df4-e71baa01f6d4 req-594ca1df-fcc1-421d-ac7d-0e3573659c9c service nova] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.969970] env[61998]: INFO nova.compute.manager [-] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Took 1.04 seconds to deallocate network for instance. 
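The write URL in the rw_handles message above follows the vSphere datastore file-access scheme: https://<host>:<port>/folder/<path within the datastore>?dcPath=<datacenter>&dsName=<datastore>. A small sketch that assembles such a URL from its parts (values taken from the log line above; the helper name is illustrative):

    from urllib.parse import quote

    def datastore_file_url(host, ds_path, dc_path, ds_name, port=443):
        # The path after /folder/ is relative to the datastore root;
        # dcPath and dsName select the datacenter and datastore.
        return ('https://%s:%d/folder/%s?dcPath=%s&dsName=%s'
                % (host, port, quote(ds_path), quote(dc_path), quote(ds_name)))

    url = datastore_file_url(
        'esx7c1n2.openstack.eu-de-1.cloud.sap',
        'vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/'
        'a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk',
        'ha-datacenter', 'datastore2')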
[ 525.974379] env[61998]: DEBUG nova.compute.claims [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 525.974599] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.122352] env[61998]: DEBUG nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 526.163460] env[61998]: DEBUG nova.virt.hardware [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 526.163729] env[61998]: DEBUG nova.virt.hardware [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 526.163883] env[61998]: DEBUG nova.virt.hardware [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 526.167153] env[61998]: DEBUG nova.virt.hardware [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 526.167153] env[61998]: DEBUG nova.virt.hardware [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 526.167318] env[61998]: DEBUG nova.virt.hardware [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb 
tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 526.167495] env[61998]: DEBUG nova.virt.hardware [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 526.167648] env[61998]: DEBUG nova.virt.hardware [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 526.169455] env[61998]: DEBUG nova.virt.hardware [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 526.169455] env[61998]: DEBUG nova.virt.hardware [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 526.169455] env[61998]: DEBUG nova.virt.hardware [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 526.169455] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb33866-7139-4d14-86cd-2d55e8887352 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.182364] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a74e0f-8f00-45f7-9cbf-282520cc08a2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.312204] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.226s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 526.312204] env[61998]: DEBUG nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 526.319679] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.697s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.321289] env[61998]: INFO nova.compute.claims [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 526.462256] env[61998]: DEBUG oslo_concurrency.lockutils [req-f4132a71-deb0-42e0-9df4-e71baa01f6d4 req-594ca1df-fcc1-421d-ac7d-0e3573659c9c service nova] Releasing lock "refresh_cache-9a13d0a1-849c-428f-ab63-29d2bd4cd60c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 526.535932] env[61998]: DEBUG oslo_vmware.rw_handles [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Completed reading data from the image iterator. {{(pid=61998) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 526.536212] env[61998]: DEBUG oslo_vmware.rw_handles [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61998) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 526.670326] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Downloaded image file data a90c4a31-8bcc-48cf-ada7-7369ab14c460 to vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk on the data store datastore2 {{(pid=61998) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 526.672977] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Caching image {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 526.672977] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Copying Virtual Disk [datastore2] vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk to [datastore2] vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 526.672977] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-069a87e0-ad85-4356-b4df-8ef962cd301d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.680511] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){ [ 526.680511] env[61998]: value = "task-1388356" [ 526.680511] env[61998]: _type = "Task" [ 526.680511] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.690086] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388356, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.827851] env[61998]: DEBUG nova.compute.utils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 526.831384] env[61998]: DEBUG nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 526.831384] env[61998]: DEBUG nova.network.neutron [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 526.882305] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquiring lock "81bbe6ae-87b2-414b-a872-4e03285abf92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.883682] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Lock "81bbe6ae-87b2-414b-a872-4e03285abf92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.103936] env[61998]: DEBUG nova.policy [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f30af2dbdd0406095767d4c5a2d38a3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d5d75286e1a46e08ed86c5a3ae84545', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 527.191163] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388356, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.337116] env[61998]: DEBUG nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 527.388584] env[61998]: DEBUG nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 527.502763] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490eace8-4837-4a49-9a27-6dfc1767e4fa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.512625] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ee7599-4f12-4225-89c3-e73c28b81de0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.548649] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7a6aea-77f0-4682-81fc-dd05fc321502 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.554295] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Acquiring lock "687eaa4d-012a-4dd9-9033-16fad3650a56" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.554295] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Lock "687eaa4d-012a-4dd9-9033-16fad3650a56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.561028] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a354983-598b-4f27-910a-f0eeedfa24c5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.576481] env[61998]: DEBUG nova.compute.provider_tree [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 527.696979] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388356, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647014} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 527.697392] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Copied Virtual Disk [datastore2] vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk to [datastore2] vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 527.697601] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Deleting the datastore file [datastore2] vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 527.697926] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2a320d8-8fe3-4427-bee8-3bb85f6d8758 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.704845] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){ [ 527.704845] env[61998]: value = "task-1388357" [ 527.704845] env[61998]: _type = "Task" [ 527.704845] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.714774] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388357, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.921894] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.063048] env[61998]: DEBUG nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 528.082275] env[61998]: DEBUG nova.scheduler.client.report [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 528.214878] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388357, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023551} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 528.215124] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 528.215263] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Moving file from [datastore2] vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf/a90c4a31-8bcc-48cf-ada7-7369ab14c460 to [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460. {{(pid=61998) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 528.215544] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-6f02ddd7-0153-494d-8995-3f6908e58b65 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.224493] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){ [ 528.224493] env[61998]: value = "task-1388358" [ 528.224493] env[61998]: _type = "Task" [ 528.224493] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.235394] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388358, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.357052] env[61998]: DEBUG nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 528.390403] env[61998]: DEBUG nova.virt.hardware [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 528.390603] env[61998]: DEBUG nova.virt.hardware [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 528.390794] env[61998]: DEBUG nova.virt.hardware [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 528.391445] env[61998]: DEBUG nova.virt.hardware [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 528.391445] env[61998]: DEBUG nova.virt.hardware [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 528.391445] env[61998]: DEBUG nova.virt.hardware [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 528.391445] env[61998]: DEBUG nova.virt.hardware [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 528.391656] env[61998]: DEBUG nova.virt.hardware [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 528.391762] env[61998]: DEBUG nova.virt.hardware [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 528.391925] env[61998]: DEBUG nova.virt.hardware [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 528.392195] env[61998]: DEBUG nova.virt.hardware [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 528.393097] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e62d046-777a-4dda-b72d-607979702a3e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.406830] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49387484-95db-44fb-91f3-f2f1161880d9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.587803] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.268s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 528.588321] env[61998]: DEBUG nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 528.593637] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 2.616s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.594185] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.738475] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388358, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.026588} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 528.738879] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] File moved {{(pid=61998) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 528.739479] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Cleaning up location [datastore2] vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 528.739575] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Deleting the datastore file [datastore2] vmware_temp/1fb19572-607d-454c-8a61-92183aeacbbf {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 528.739854] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ab12a35-7b6e-4a40-9c63-683b3a5a9aa3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.747645] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){ [ 528.747645] env[61998]: value = "task-1388359" [ 528.747645] env[61998]: _type = "Task" [ 528.747645] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.764541] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388359, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.937822] env[61998]: DEBUG nova.network.neutron [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Successfully created port: 180ea470-060a-4016-b570-1e5bf3515605 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 528.950750] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Acquiring lock "d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.950984] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Lock "d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.096033] env[61998]: DEBUG nova.compute.utils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 529.098132] env[61998]: DEBUG nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 529.098132] env[61998]: DEBUG nova.network.neutron [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 529.263639] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388359, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025423} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 529.263639] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 529.264498] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ffa2327-52b1-4177-8386-95df4a8088b7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.270285] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){ [ 529.270285] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]527a97c1-d3c8-f3c8-f7ce-bfd1ae256f5d" [ 529.270285] env[61998]: _type = "Task" [ 529.270285] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.272374] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb9f1eb-7e99-44ff-bad1-0acd99ec3413 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.284165] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02335155-9605-4e56-889f-49aec43a9b18 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.296933] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]527a97c1-d3c8-f3c8-f7ce-bfd1ae256f5d, 'name': SearchDatastore_Task, 'duration_secs': 0.011457} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 529.323127] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.323127] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 2aabbd53-4c4d-4b53-8135-34cc5a17fd47/2aabbd53-4c4d-4b53-8135-34cc5a17fd47.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 529.323454] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b178e30-ed14-4d0f-8ab1-21ca4b7eff32 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.326175] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb2a33c-610b-463e-bb5b-2fb9f6e573aa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.335796] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6689ffb2-db88-443e-932f-2a53b99a1797 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.339882] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){ [ 529.339882] env[61998]: value = "task-1388360" [ 529.339882] env[61998]: _type = "Task" [ 529.339882] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.353807] env[61998]: DEBUG nova.compute.provider_tree [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 529.361257] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.458038] env[61998]: DEBUG nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 529.474075] env[61998]: DEBUG nova.policy [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f69fb51cac434f25ad53ba9bfe8d480f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7146aa9cae39448ca92f4f966e1d4daf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 529.606629] env[61998]: DEBUG nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 529.858951] env[61998]: DEBUG nova.scheduler.client.report [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 529.864248] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388360, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.991264] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.047026] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Acquiring lock "f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.047492] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Lock "f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.274454] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 530.274775] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 530.274984] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Starting heal instance info cache {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10227}} [ 530.275656] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Rebuilding the list of instances to heal {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10231}} [ 530.351072] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388360, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.367914] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.776s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 530.367914] env[61998]: ERROR nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7288c976-3fee-483a-959d-9c6bd8501c86, please check neutron logs for more information. [ 530.367914] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Traceback (most recent call last): [ 530.367914] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 530.367914] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] self.driver.spawn(context, instance, image_meta, [ 530.367914] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 530.367914] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 530.367914] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 530.367914] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] vm_ref = self.build_virtual_machine(instance, [ 530.368237] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 530.368237] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] vif_infos = vmwarevif.get_vif_info(self._session, [ 530.368237] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 530.368237] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] for vif in network_info: [ 530.368237] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 530.368237] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] return self._sync_wrapper(fn, *args, **kwargs) [ 530.368237] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 530.368237] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] self.wait() [ 530.368237] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 530.368237] env[61998]: ERROR nova.compute.manager 
[instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] self[:] = self._gt.wait() [ 530.368237] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 530.368237] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] return self._exit_event.wait() [ 530.368237] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] result = hub.switch() [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] return self.greenlet.switch() [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] result = function(*args, **kwargs) [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] return func(*args, **kwargs) [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] raise e [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] nwinfo = self.network_api.allocate_for_instance( [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 530.368570] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] created_port_ids = self._update_ports_for_instance( [ 530.368942] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 530.368942] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] with excutils.save_and_reraise_exception(): [ 530.368942] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.368942] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] self.force_reraise() [ 530.368942] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise 
[ 530.368942] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] raise self.value [ 530.368942] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 530.368942] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] updated_port = self._update_port( [ 530.368942] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.368942] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] _ensure_no_port_binding_failure(port) [ 530.368942] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.368942] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] raise exception.PortBindingFailed(port_id=port['id']) [ 530.369265] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] nova.exception.PortBindingFailed: Binding failed for port 7288c976-3fee-483a-959d-9c6bd8501c86, please check neutron logs for more information. [ 530.369265] env[61998]: ERROR nova.compute.manager [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] [ 530.369265] env[61998]: DEBUG nova.compute.utils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Binding failed for port 7288c976-3fee-483a-959d-9c6bd8501c86, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 530.370111] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.448s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.372144] env[61998]: INFO nova.compute.claims [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 530.379698] env[61998]: DEBUG nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Build of instance 9a13d0a1-849c-428f-ab63-29d2bd4cd60c was re-scheduled: Binding failed for port 7288c976-3fee-483a-959d-9c6bd8501c86, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 530.380507] env[61998]: DEBUG nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 530.380849] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "refresh_cache-9a13d0a1-849c-428f-ab63-29d2bd4cd60c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 530.381210] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquired lock "refresh_cache-9a13d0a1-849c-428f-ab63-29d2bd4cd60c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 530.381477] env[61998]: DEBUG nova.network.neutron [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 530.550987] env[61998]: DEBUG nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 530.619613] env[61998]: ERROR nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 80bd11f6-156e-47d4-b4a6-925e658bfa06, please check neutron logs for more information. 
[ 530.619613] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 530.619613] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 530.619613] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 530.619613] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 530.619613] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 530.619613] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 530.619613] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 530.619613] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.619613] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 530.619613] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.619613] env[61998]: ERROR nova.compute.manager raise self.value [ 530.619613] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 530.619613] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 530.619613] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.619613] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 530.620039] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.620039] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 530.620039] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 80bd11f6-156e-47d4-b4a6-925e658bfa06, please check neutron logs for more information. 
[ 530.620039] env[61998]: ERROR nova.compute.manager [ 530.620752] env[61998]: Traceback (most recent call last): [ 530.620807] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 530.620807] env[61998]: listener.cb(fileno) [ 530.620807] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 530.620807] env[61998]: result = function(*args, **kwargs) [ 530.620807] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 530.620807] env[61998]: return func(*args, **kwargs) [ 530.620807] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 530.620807] env[61998]: raise e [ 530.620807] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 530.620807] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 530.620807] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 530.620807] env[61998]: created_port_ids = self._update_ports_for_instance( [ 530.620807] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 530.620807] env[61998]: with excutils.save_and_reraise_exception(): [ 530.620807] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.620807] env[61998]: self.force_reraise() [ 530.620807] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.620807] env[61998]: raise self.value [ 530.620807] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 530.620807] env[61998]: updated_port = self._update_port( [ 530.620807] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.620807] env[61998]: _ensure_no_port_binding_failure(port) [ 530.620807] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.620807] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 530.621498] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 80bd11f6-156e-47d4-b4a6-925e658bfa06, please check neutron logs for more information. [ 530.621498] env[61998]: Removing descriptor: 15 [ 530.622956] env[61998]: DEBUG nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 530.625514] env[61998]: ERROR nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 80bd11f6-156e-47d4-b4a6-925e658bfa06, please check neutron logs for more information. 
[ 530.625514] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Traceback (most recent call last):
[ 530.625514] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources
[ 530.625514] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     yield resources
[ 530.625514] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 530.625514] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     self.driver.spawn(context, instance, image_meta,
[ 530.625514] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 530.625514] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 530.625514] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 530.625514] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     vm_ref = self.build_virtual_machine(instance,
[ 530.625514] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     for vif in network_info:
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     return self._sync_wrapper(fn, *args, **kwargs)
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     self.wait()
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     self[:] = self._gt.wait()
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     return self._exit_event.wait()
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 530.625842] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     result = hub.switch()
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     return self.greenlet.switch()
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     result = function(*args, **kwargs)
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     return func(*args, **kwargs)
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     raise e
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     nwinfo = self.network_api.allocate_for_instance(
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     created_port_ids = self._update_ports_for_instance(
[ 530.626181] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     with excutils.save_and_reraise_exception():
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     self.force_reraise()
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     raise self.value
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     updated_port = self._update_port(
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     _ensure_no_port_binding_failure(port)
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]     raise exception.PortBindingFailed(port_id=port['id'])
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] nova.exception.PortBindingFailed: Binding failed for port 80bd11f6-156e-47d4-b4a6-925e658bfa06, please check neutron logs for more information.
[ 530.626558] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5]
[ 530.626979] env[61998]: INFO nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Terminating instance
[ 530.629465] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquiring lock "refresh_cache-2a24c5ce-9b52-49ae-b847-6280cef02eb5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 530.629465] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquired lock "refresh_cache-2a24c5ce-9b52-49ae-b847-6280cef02eb5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 530.629635] env[61998]: DEBUG nova.network.neutron [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 530.668392] env[61998]: DEBUG nova.virt.hardware [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 530.668634] env[61998]: DEBUG nova.virt.hardware [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 530.668784] env[61998]: DEBUG nova.virt.hardware [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 530.668957] env[61998]: DEBUG nova.virt.hardware [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 530.669241] env[61998]: DEBUG nova.virt.hardware [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 530.669313] env[61998]: DEBUG nova.virt.hardware [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 530.669458] env[61998]: DEBUG nova.virt.hardware [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 530.669609] env[61998]: DEBUG nova.virt.hardware [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 530.669763] env[61998]: DEBUG nova.virt.hardware [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 530.669915] env[61998]: DEBUG nova.virt.hardware [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 530.670105] env[61998]: DEBUG nova.virt.hardware [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 530.671388] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d5af04-970d-4256-a385-6896b258b4e7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 530.679864] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b06d51-8370-4236-8e78-70daccee7abe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 530.780187] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}}
[ 530.780615] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}}
[ 530.780615] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}}
[ 530.780615] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}}
[ 530.780764] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Didn't find any instances for network info cache update. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}}
[ 530.780923] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 530.782030] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 530.782275] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 530.784773] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 530.784773] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 530.784773] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 530.853098] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388360, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 530.965137] env[61998]: DEBUG nova.network.neutron [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 531.080949] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 531.288549] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Getting list of instances from cluster (obj){
[ 531.288549] env[61998]:   value = "domain-c8"
[ 531.288549] env[61998]:   _type = "ClusterComputeResource"
[ 531.288549] env[61998]: } {{(pid=61998) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}}
[ 531.289621] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad623e9-5685-4c5c-803c-0dc16afe7cfa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.301600] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Got total of 1 instances {{(pid=61998) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}}
[ 531.302490] env[61998]: WARNING nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] While synchronizing instance power states, found 5 instances in the database and 1 instances on the hypervisor.
[ 531.302490] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Triggering sync for uuid 2aabbd53-4c4d-4b53-8135-34cc5a17fd47 {{(pid=61998) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10637}}
[ 531.302490] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Triggering sync for uuid 2a24c5ce-9b52-49ae-b847-6280cef02eb5 {{(pid=61998) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10637}}
[ 531.302490] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Triggering sync for uuid 5a590ae6-eb88-433d-81b4-33e7b6ace868 {{(pid=61998) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10637}}
[ 531.302490] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Triggering sync for uuid 4cafaebd-b577-4a7e-92b8-151445f66a0d {{(pid=61998) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10637}}
[ 531.302693] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Triggering sync for uuid 81bbe6ae-87b2-414b-a872-4e03285abf92 {{(pid=61998) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10637}}
[ 531.302886] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "2aabbd53-4c4d-4b53-8135-34cc5a17fd47" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 531.303441] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "2a24c5ce-9b52-49ae-b847-6280cef02eb5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 531.304096] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "5a590ae6-eb88-433d-81b4-33e7b6ace868" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 531.304340] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "4cafaebd-b577-4a7e-92b8-151445f66a0d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 531.304559] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "81bbe6ae-87b2-414b-a872-4e03285abf92" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 531.304913] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 531.305136] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61998) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10846}}
[ 531.305293] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 531.315866] env[61998]: DEBUG nova.network.neutron [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 531.353785] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388360, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.629896} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 531.353785] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 2aabbd53-4c4d-4b53-8135-34cc5a17fd47/2aabbd53-4c4d-4b53-8135-34cc5a17fd47.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 531.353785] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 531.354104] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7506f17a-ea03-4441-bb66-2376037eb924 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.363862] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){
[ 531.363862] env[61998]:   value = "task-1388361"
[ 531.363862] env[61998]:   _type = "Task"
[ 531.363862] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 531.372034] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388361, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 531.517566] env[61998]: DEBUG nova.network.neutron [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 531.520116] env[61998]: DEBUG nova.network.neutron [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 531.594371] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfcd2b1c-4917-4167-ac5e-fbd6076031f4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.602277] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adf27b1-3503-46aa-bacb-ff2b3089055e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.638784] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be9bdc7-2b66-4189-b94c-8634500ea999 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.653483] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d814d38c-3f2a-43bf-b851-a1b028720a08 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.659682] env[61998]: DEBUG nova.network.neutron [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Successfully created port: 237ef56f-98dd-4c68-8be3-7f0387c010ff {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 531.675938] env[61998]: DEBUG nova.compute.provider_tree [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 531.683074] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Acquiring lock "782f1eba-459c-4249-b74a-128b22c64ca3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 531.683741] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Lock "782f1eba-459c-4249-b74a-128b22c64ca3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 531.809702] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 531.873944] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388361, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095037} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 531.874242] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 531.875445] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2dd1fd3-c226-45ab-9cda-c34040063c3e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.899093] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 2aabbd53-4c4d-4b53-8135-34cc5a17fd47/2aabbd53-4c4d-4b53-8135-34cc5a17fd47.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 531.899247] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65243a33-0904-44a2-bc78-e86b778038e8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.922937] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){
[ 531.922937] env[61998]:   value = "task-1388362"
[ 531.922937] env[61998]:   _type = "Task"
[ 531.922937] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 531.931873] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388362, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 532.027146] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Releasing lock "refresh_cache-2a24c5ce-9b52-49ae-b847-6280cef02eb5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 532.028731] env[61998]: DEBUG nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}}
[ 532.028731] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 532.028731] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Releasing lock "refresh_cache-9a13d0a1-849c-428f-ab63-29d2bd4cd60c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 532.028731] env[61998]: DEBUG nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}}
[ 532.028731] env[61998]: DEBUG nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 532.029170] env[61998]: DEBUG nova.network.neutron [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 532.030758] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3473ec9f-5036-4b46-92d3-9655edfc9e1e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 532.041962] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5854835-bc02-4691-8e99-c1dab2f1ecba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 532.067213] env[61998]: DEBUG nova.network.neutron [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 532.076601] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2a24c5ce-9b52-49ae-b847-6280cef02eb5 could not be found.
[ 532.077139] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 532.077435] env[61998]: INFO nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 532.077815] env[61998]: DEBUG oslo.service.loopingcall [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 532.078378] env[61998]: DEBUG nova.compute.manager [-] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 532.078587] env[61998]: DEBUG nova.network.neutron [-] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 532.137582] env[61998]: DEBUG nova.network.neutron [-] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 532.186074] env[61998]: DEBUG nova.scheduler.client.report [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 532.190009] env[61998]: DEBUG nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}}
[ 532.440115] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388362, 'name': ReconfigVM_Task, 'duration_secs': 0.366823} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 532.440613] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 2aabbd53-4c4d-4b53-8135-34cc5a17fd47/2aabbd53-4c4d-4b53-8135-34cc5a17fd47.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 532.441148] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bcf83ff8-54eb-4846-987b-63823761eb1c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 532.451027] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){
[ 532.451027] env[61998]:   value = "task-1388363"
[ 532.451027] env[61998]:   _type = "Task"
[ 532.451027] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 532.461387] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388363, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 532.579443] env[61998]: DEBUG nova.network.neutron [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 532.646652] env[61998]: DEBUG nova.network.neutron [-] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 532.697347] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 532.697844] env[61998]: DEBUG nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}}
[ 532.708066] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.113s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 532.709978] env[61998]: INFO nova.compute.claims [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 532.744278] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 532.964872] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388363, 'name': Rename_Task, 'duration_secs': 0.126124} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 532.964872] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 532.964872] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb7d5a4d-e2a5-4c99-a96a-59d99d1060d2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 532.972428] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){
[ 532.972428] env[61998]:   value = "task-1388364"
[ 532.972428] env[61998]:   _type = "Task"
[ 532.972428] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 532.985303] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388364, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 533.081719] env[61998]: INFO nova.compute.manager [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 9a13d0a1-849c-428f-ab63-29d2bd4cd60c] Took 1.05 seconds to deallocate network for instance.
[ 533.149972] env[61998]: INFO nova.compute.manager [-] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Took 1.07 seconds to deallocate network for instance.
[ 533.151443] env[61998]: DEBUG nova.compute.claims [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 533.151607] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 533.220738] env[61998]: DEBUG nova.compute.utils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 533.226403] env[61998]: DEBUG nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 533.226403] env[61998]: DEBUG nova.network.neutron [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 533.297683] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Acquiring lock "ac0209e5-66d0-4a04-892d-85eba3c3663a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 533.297683] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Lock "ac0209e5-66d0-4a04-892d-85eba3c3663a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 533.375329] env[61998]: DEBUG nova.compute.manager [req-922b9609-6d51-4e5a-9b4d-9cf479171a73 req-a5fad589-85f9-4a5e-bf61-19f1448b7a1f service nova] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Received event network-changed-80bd11f6-156e-47d4-b4a6-925e658bfa06 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 533.375579] env[61998]: DEBUG nova.compute.manager [req-922b9609-6d51-4e5a-9b4d-9cf479171a73 req-a5fad589-85f9-4a5e-bf61-19f1448b7a1f service nova] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Refreshing instance network info cache due to event network-changed-80bd11f6-156e-47d4-b4a6-925e658bfa06. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}}
[ 533.375773] env[61998]: DEBUG oslo_concurrency.lockutils [req-922b9609-6d51-4e5a-9b4d-9cf479171a73 req-a5fad589-85f9-4a5e-bf61-19f1448b7a1f service nova] Acquiring lock "refresh_cache-2a24c5ce-9b52-49ae-b847-6280cef02eb5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 533.376112] env[61998]: DEBUG oslo_concurrency.lockutils [req-922b9609-6d51-4e5a-9b4d-9cf479171a73 req-a5fad589-85f9-4a5e-bf61-19f1448b7a1f service nova] Acquired lock "refresh_cache-2a24c5ce-9b52-49ae-b847-6280cef02eb5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 533.376764] env[61998]: DEBUG nova.network.neutron [req-922b9609-6d51-4e5a-9b4d-9cf479171a73 req-a5fad589-85f9-4a5e-bf61-19f1448b7a1f service nova] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Refreshing network info cache for port 80bd11f6-156e-47d4-b4a6-925e658bfa06 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 533.445946] env[61998]: ERROR nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 180ea470-060a-4016-b570-1e5bf3515605, please check neutron logs for more information.
[ 533.445946] env[61998]: ERROR nova.compute.manager Traceback (most recent call last):
[ 533.445946] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 533.445946] env[61998]: ERROR nova.compute.manager     nwinfo = self.network_api.allocate_for_instance(
[ 533.445946] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 533.445946] env[61998]: ERROR nova.compute.manager     created_port_ids = self._update_ports_for_instance(
[ 533.445946] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 533.445946] env[61998]: ERROR nova.compute.manager     with excutils.save_and_reraise_exception():
[ 533.445946] env[61998]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 533.445946] env[61998]: ERROR nova.compute.manager     self.force_reraise()
[ 533.445946] env[61998]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 533.445946] env[61998]: ERROR nova.compute.manager     raise self.value
[ 533.445946] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 533.445946] env[61998]: ERROR nova.compute.manager     updated_port = self._update_port(
[ 533.445946] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 533.445946] env[61998]: ERROR nova.compute.manager     _ensure_no_port_binding_failure(port)
[ 533.446833] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 533.446833] env[61998]: ERROR nova.compute.manager     raise exception.PortBindingFailed(port_id=port['id'])
[ 533.446833] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 180ea470-060a-4016-b570-1e5bf3515605, please check neutron logs for more information.
[ 533.446833] env[61998]: ERROR nova.compute.manager
[ 533.446833] env[61998]: Traceback (most recent call last):
[ 533.446833] env[61998]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 533.446833] env[61998]:     listener.cb(fileno)
[ 533.446833] env[61998]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 533.446833] env[61998]:     result = function(*args, **kwargs)
[ 533.446833] env[61998]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 533.446833] env[61998]:     return func(*args, **kwargs)
[ 533.446833] env[61998]:   File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 533.446833] env[61998]:     raise e
[ 533.446833] env[61998]:   File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 533.446833] env[61998]:     nwinfo = self.network_api.allocate_for_instance(
[ 533.447221] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 533.447221] env[61998]:     created_port_ids = self._update_ports_for_instance(
[ 533.447221] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 533.447221] env[61998]:     with excutils.save_and_reraise_exception():
[ 533.447221] env[61998]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 533.447221] env[61998]:     self.force_reraise()
[ 533.447221] env[61998]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 533.447221] env[61998]:     raise self.value
[ 533.447221] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 533.447221] env[61998]:     updated_port = self._update_port(
[ 533.447221] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 533.447221] env[61998]:     _ensure_no_port_binding_failure(port)
[ 533.447221] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 533.447221] env[61998]:     raise exception.PortBindingFailed(port_id=port['id'])
[ 533.447221] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 180ea470-060a-4016-b570-1e5bf3515605, please check neutron logs for more information.
[ 533.447221] env[61998]: Removing descriptor: 17
[ 533.451050] env[61998]: ERROR nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 180ea470-060a-4016-b570-1e5bf3515605, please check neutron logs for more information.
[ 533.451050] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Traceback (most recent call last):
[ 533.451050] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources
[ 533.451050] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     yield resources
[ 533.451050] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 533.451050] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     self.driver.spawn(context, instance, image_meta,
[ 533.451050] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 533.451050] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 533.451050] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 533.451050] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     vm_ref = self.build_virtual_machine(instance,
[ 533.451050] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     for vif in network_info:
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     return self._sync_wrapper(fn, *args, **kwargs)
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     self.wait()
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     self[:] = self._gt.wait()
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     return self._exit_event.wait()
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 533.451347] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     result = hub.switch()
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     return self.greenlet.switch()
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     result = function(*args, **kwargs)
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     return func(*args, **kwargs)
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     raise e
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     nwinfo = self.network_api.allocate_for_instance(
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     created_port_ids = self._update_ports_for_instance(
[ 533.451696] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     with excutils.save_and_reraise_exception():
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     self.force_reraise()
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     raise self.value
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     updated_port = self._update_port(
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     _ensure_no_port_binding_failure(port)
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]     raise exception.PortBindingFailed(port_id=port['id'])
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] nova.exception.PortBindingFailed: Binding failed for port 180ea470-060a-4016-b570-1e5bf3515605, please check neutron logs for more information.
[ 533.452101] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868]
[ 533.452460] env[61998]: INFO nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Terminating instance
[ 533.452460] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Acquiring lock "refresh_cache-5a590ae6-eb88-433d-81b4-33e7b6ace868" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 533.452460] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Acquired lock "refresh_cache-5a590ae6-eb88-433d-81b4-33e7b6ace868" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 533.452460] env[61998]: DEBUG nova.network.neutron [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 533.488083] env[61998]: DEBUG oslo_vmware.api [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388364, 'name': PowerOnVM_Task, 'duration_secs': 0.425155} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 533.488386] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 533.488632] env[61998]: INFO nova.compute.manager [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Took 9.57 seconds to spawn the instance on the hypervisor.
[ 533.488886] env[61998]: DEBUG nova.compute.manager [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 533.489622] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82c83bf-6651-4802-9a21-83a3be7bee75 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.503278] env[61998]: DEBUG nova.policy [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '274f384481b44caf9a114ca29022c231', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '067efe2b964d498fa7e2843439f023ca', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 533.738292] env[61998]: DEBUG nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 533.938858] env[61998]: DEBUG nova.network.neutron [req-922b9609-6d51-4e5a-9b4d-9cf479171a73 req-a5fad589-85f9-4a5e-bf61-19f1448b7a1f service nova] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.957961] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611cf478-0b29-43ac-8ff8-98d464dc4c7a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.967145] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed06935c-8a83-43c3-9931-cdc11a832563 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.014930] env[61998]: DEBUG nova.network.neutron [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.019982] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7009f1d0-fd07-46ce-87fd-baa6725edf8a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.030502] env[61998]: INFO nova.compute.manager [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Took 15.05 seconds to build instance. 
[ 534.038318] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a5b320-ec30-45ad-a65e-114904beb6d2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.051852] env[61998]: DEBUG nova.compute.provider_tree [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.118214] env[61998]: INFO nova.scheduler.client.report [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Deleted allocations for instance 9a13d0a1-849c-428f-ab63-29d2bd4cd60c [ 534.227217] env[61998]: DEBUG nova.network.neutron [req-922b9609-6d51-4e5a-9b4d-9cf479171a73 req-a5fad589-85f9-4a5e-bf61-19f1448b7a1f service nova] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.249762] env[61998]: DEBUG nova.network.neutron [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.537451] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9662cf0-280a-4a62-b4e8-991285601a4c tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Lock "2aabbd53-4c4d-4b53-8135-34cc5a17fd47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.574s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.540291] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "2aabbd53-4c4d-4b53-8135-34cc5a17fd47" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 3.237s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.540291] env[61998]: INFO nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] During sync_power_state the instance has a pending task (spawning). Skip.
[ 534.540537] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "2aabbd53-4c4d-4b53-8135-34cc5a17fd47" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.555460] env[61998]: DEBUG nova.scheduler.client.report [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 534.631374] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d5ff6090-b166-474a-8b32-5d5ae9963b69 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "9a13d0a1-849c-428f-ab63-29d2bd4cd60c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 18.236s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.677351] env[61998]: DEBUG nova.network.neutron [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Successfully created port: 6c9256dc-277c-45e9-aac5-1754cf36080b {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 534.725514] env[61998]: DEBUG nova.compute.manager [req-c3d82ab3-bffe-4bb3-a29d-739fc40dc83d req-f14fe18d-eba5-4a8e-9314-9a73a2959fb8 service nova] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Received event network-changed-180ea470-060a-4016-b570-1e5bf3515605 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 534.725514] env[61998]: DEBUG nova.compute.manager [req-c3d82ab3-bffe-4bb3-a29d-739fc40dc83d req-f14fe18d-eba5-4a8e-9314-9a73a2959fb8 service nova] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Refreshing instance network info cache due to event network-changed-180ea470-060a-4016-b570-1e5bf3515605.
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 534.725928] env[61998]: DEBUG oslo_concurrency.lockutils [req-c3d82ab3-bffe-4bb3-a29d-739fc40dc83d req-f14fe18d-eba5-4a8e-9314-9a73a2959fb8 service nova] Acquiring lock "refresh_cache-5a590ae6-eb88-433d-81b4-33e7b6ace868" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.732640] env[61998]: DEBUG oslo_concurrency.lockutils [req-922b9609-6d51-4e5a-9b4d-9cf479171a73 req-a5fad589-85f9-4a5e-bf61-19f1448b7a1f service nova] Releasing lock "refresh_cache-2a24c5ce-9b52-49ae-b847-6280cef02eb5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.732640] env[61998]: DEBUG nova.compute.manager [req-922b9609-6d51-4e5a-9b4d-9cf479171a73 req-a5fad589-85f9-4a5e-bf61-19f1448b7a1f service nova] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Received event network-vif-deleted-80bd11f6-156e-47d4-b4a6-925e658bfa06 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 534.752262] env[61998]: DEBUG nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 534.759339] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Releasing lock "refresh_cache-5a590ae6-eb88-433d-81b4-33e7b6ace868" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.762132] env[61998]: DEBUG nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 534.762132] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 534.762132] env[61998]: DEBUG oslo_concurrency.lockutils [req-c3d82ab3-bffe-4bb3-a29d-739fc40dc83d req-f14fe18d-eba5-4a8e-9314-9a73a2959fb8 service nova] Acquired lock "refresh_cache-5a590ae6-eb88-433d-81b4-33e7b6ace868" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.762132] env[61998]: DEBUG nova.network.neutron [req-c3d82ab3-bffe-4bb3-a29d-739fc40dc83d req-f14fe18d-eba5-4a8e-9314-9a73a2959fb8 service nova] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Refreshing network info cache for port 180ea470-060a-4016-b570-1e5bf3515605 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 534.771472] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-abdb8c28-8bab-4cd2-91b9-e853765337bb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.779495] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ccb0a5-1ce2-4714-af7f-599473751f8c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.812492] env[61998]: DEBUG nova.virt.hardware [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 534.812840] env[61998]: DEBUG nova.virt.hardware [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 534.813560] env[61998]: DEBUG nova.virt.hardware [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 534.813773] env[61998]: DEBUG nova.virt.hardware [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Flavor pref 
0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 534.813927] env[61998]: DEBUG nova.virt.hardware [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 534.814091] env[61998]: DEBUG nova.virt.hardware [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 534.814309] env[61998]: DEBUG nova.virt.hardware [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 534.814462] env[61998]: DEBUG nova.virt.hardware [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 534.814643] env[61998]: DEBUG nova.virt.hardware [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 534.814778] env[61998]: DEBUG nova.virt.hardware [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 534.814946] env[61998]: DEBUG nova.virt.hardware [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 534.816867] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da31ebed-7532-4287-8b00-10e80df20433 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.826870] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5a590ae6-eb88-433d-81b4-33e7b6ace868 could not be found. 
[ 534.827116] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 534.827309] env[61998]: INFO nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Took 0.07 seconds to destroy the instance on the hypervisor. [ 534.827570] env[61998]: DEBUG oslo.service.loopingcall [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 534.828251] env[61998]: DEBUG nova.compute.manager [-] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 534.828343] env[61998]: DEBUG nova.network.neutron [-] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 534.834047] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c329b3aa-1012-4f8e-86ce-74edb8de7b18 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.873571] env[61998]: DEBUG nova.network.neutron [-] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.045135] env[61998]: DEBUG nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 535.061951] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.354s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.062442] env[61998]: DEBUG nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Start building networks asynchronously for instance.
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 535.066756] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.076s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.069151] env[61998]: INFO nova.compute.claims [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 535.335228] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Acquiring lock "0fd8d146-79fe-4e2a-90a3-67d457fc570f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.336055] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Lock "0fd8d146-79fe-4e2a-90a3-67d457fc570f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.379980] env[61998]: DEBUG nova.network.neutron [-] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.489825] env[61998]: DEBUG nova.network.neutron [req-c3d82ab3-bffe-4bb3-a29d-739fc40dc83d req-f14fe18d-eba5-4a8e-9314-9a73a2959fb8 service nova] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.574773] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.582223] env[61998]: DEBUG nova.compute.utils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 535.587941] env[61998]: DEBUG nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Allocating IP information in the background.
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 535.588205] env[61998]: DEBUG nova.network.neutron [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 535.739302] env[61998]: DEBUG nova.network.neutron [req-c3d82ab3-bffe-4bb3-a29d-739fc40dc83d req-f14fe18d-eba5-4a8e-9314-9a73a2959fb8 service nova] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.792330] env[61998]: DEBUG nova.policy [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '86f109980703481eb3dae2539fec9359', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82f822e6b82c42b0a520b693150ebc2b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 535.839714] env[61998]: DEBUG nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 535.885129] env[61998]: INFO nova.compute.manager [-] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Took 1.06 seconds to deallocate network for instance. 
[ 535.891823] env[61998]: DEBUG nova.compute.claims [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 535.891823] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.998212] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquiring lock "e4ada227-b79a-457a-b063-dde99840aa14" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.998940] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lock "e4ada227-b79a-457a-b063-dde99840aa14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.088984] env[61998]: DEBUG nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 536.120229] env[61998]: ERROR nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 237ef56f-98dd-4c68-8be3-7f0387c010ff, please check neutron logs for more information.
[ 536.120229] env[61998]: ERROR nova.compute.manager Traceback (most recent call last):
[ 536.120229] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 536.120229] env[61998]: ERROR nova.compute.manager     nwinfo = self.network_api.allocate_for_instance(
[ 536.120229] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 536.120229] env[61998]: ERROR nova.compute.manager     created_port_ids = self._update_ports_for_instance(
[ 536.120229] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 536.120229] env[61998]: ERROR nova.compute.manager     with excutils.save_and_reraise_exception():
[ 536.120229] env[61998]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 536.120229] env[61998]: ERROR nova.compute.manager     self.force_reraise()
[ 536.120229] env[61998]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 536.120229] env[61998]: ERROR nova.compute.manager     raise self.value
[ 536.120229] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 536.120229] env[61998]: ERROR nova.compute.manager     updated_port = self._update_port(
[ 536.120229] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 536.120229] env[61998]: ERROR nova.compute.manager     _ensure_no_port_binding_failure(port)
[ 536.121696] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 536.121696] env[61998]: ERROR nova.compute.manager     raise exception.PortBindingFailed(port_id=port['id'])
[ 536.121696] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 237ef56f-98dd-4c68-8be3-7f0387c010ff, please check neutron logs for more information.
[ 536.121696] env[61998]: ERROR nova.compute.manager
[ 536.121696] env[61998]: Traceback (most recent call last):
[ 536.121696] env[61998]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 536.121696] env[61998]:     listener.cb(fileno)
[ 536.121696] env[61998]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 536.121696] env[61998]:     result = function(*args, **kwargs)
[ 536.121696] env[61998]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 536.121696] env[61998]:     return func(*args, **kwargs)
[ 536.121696] env[61998]:   File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 536.121696] env[61998]:     raise e
[ 536.121696] env[61998]:   File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 536.121696] env[61998]:     nwinfo = self.network_api.allocate_for_instance(
[ 536.121696] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 536.121696] env[61998]:     created_port_ids = self._update_ports_for_instance(
[ 536.121696] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 536.121696] env[61998]:     with excutils.save_and_reraise_exception():
[ 536.121696] env[61998]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 536.121696] env[61998]:     self.force_reraise()
[ 536.121696] env[61998]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 536.121696] env[61998]:     raise self.value
[ 536.121696] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 536.121696] env[61998]:     updated_port = self._update_port(
[ 536.121696] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 536.121696] env[61998]:     _ensure_no_port_binding_failure(port)
[ 536.121696] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 536.121696] env[61998]:     raise exception.PortBindingFailed(port_id=port['id'])
[ 536.123582] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 237ef56f-98dd-4c68-8be3-7f0387c010ff, please check neutron logs for more information.
[ 536.123582] env[61998]: Removing descriptor: 16
[ 536.123582] env[61998]: ERROR nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 237ef56f-98dd-4c68-8be3-7f0387c010ff, please check neutron logs for more information.
[ 536.123582] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Traceback (most recent call last):
[ 536.123582] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources
[ 536.123582] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     yield resources
[ 536.123582] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 536.123582] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     self.driver.spawn(context, instance, image_meta,
[ 536.123582] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 536.123582] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 536.123582] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 536.123582] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     vm_ref = self.build_virtual_machine(instance,
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     for vif in network_info:
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     return self._sync_wrapper(fn, *args, **kwargs)
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     self.wait()
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     self[:] = self._gt.wait()
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     return self._exit_event.wait()
[ 536.124129] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     result = hub.switch()
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     return self.greenlet.switch()
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     result = function(*args, **kwargs)
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     return func(*args, **kwargs)
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     raise e
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     nwinfo = self.network_api.allocate_for_instance(
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 536.124606] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     created_port_ids = self._update_ports_for_instance(
[ 536.124915] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 536.124915] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     with excutils.save_and_reraise_exception():
[ 536.124915] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 536.124915] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     self.force_reraise()
[ 536.124915] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 536.124915] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     raise self.value
[ 536.124915] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 536.124915] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     updated_port = self._update_port(
[ 536.124915] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 536.124915] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     _ensure_no_port_binding_failure(port)
[ 536.124915] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 536.124915] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]     raise exception.PortBindingFailed(port_id=port['id'])
[ 536.125220] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] nova.exception.PortBindingFailed: Binding failed for port 237ef56f-98dd-4c68-8be3-7f0387c010ff, please check neutron logs for more information.
[ 536.125220] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d]
[ 536.125220] env[61998]: INFO nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Terminating instance [ 536.127744] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquiring lock "refresh_cache-4cafaebd-b577-4a7e-92b8-151445f66a0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.127861] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquired lock "refresh_cache-4cafaebd-b577-4a7e-92b8-151445f66a0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.128025] env[61998]: DEBUG nova.network.neutron [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 536.250772] env[61998]: DEBUG oslo_concurrency.lockutils [req-c3d82ab3-bffe-4bb3-a29d-739fc40dc83d req-f14fe18d-eba5-4a8e-9314-9a73a2959fb8 service nova] Releasing lock "refresh_cache-5a590ae6-eb88-433d-81b4-33e7b6ace868" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 536.338630] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85296729-dd96-4bc1-9fae-d8d6843ad954 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.351604] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1ce8fb-771f-43a4-8da4-60b8d9dd6b49 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.388559] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.389480] env[61998]: DEBUG
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b52de3-1d65-4954-b7de-2a46de118312 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.398124] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513ae0f5-ef0a-4552-a6dc-5f9f2f96cf48 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.411837] env[61998]: DEBUG nova.compute.provider_tree [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 536.675796] env[61998]: DEBUG nova.network.neutron [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 536.835375] env[61998]: DEBUG nova.network.neutron [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.916344] env[61998]: DEBUG nova.scheduler.client.report [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 536.928778] env[61998]: DEBUG nova.compute.manager [req-6b336009-5452-4edf-8690-f10e6e853c1e req-a2d72fb5-c4f6-46ff-8d61-6de90c900029 service nova] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Received event network-changed-237ef56f-98dd-4c68-8be3-7f0387c010ff {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 536.928961] env[61998]: DEBUG nova.compute.manager [req-6b336009-5452-4edf-8690-f10e6e853c1e req-a2d72fb5-c4f6-46ff-8d61-6de90c900029 service nova] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Refreshing instance network info cache due to event network-changed-237ef56f-98dd-4c68-8be3-7f0387c010ff. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 536.929233] env[61998]: DEBUG oslo_concurrency.lockutils [req-6b336009-5452-4edf-8690-f10e6e853c1e req-a2d72fb5-c4f6-46ff-8d61-6de90c900029 service nova] Acquiring lock "refresh_cache-4cafaebd-b577-4a7e-92b8-151445f66a0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.053907] env[61998]: DEBUG nova.compute.manager [None req-a66cd1d1-61ab-4d5f-ace6-7849693f7a98 tempest-ServerDiagnosticsV248Test-2030743529 tempest-ServerDiagnosticsV248Test-2030743529-project-admin] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 537.054862] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac392c8b-13b5-40b3-8025-d24b2f379188 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.065139] env[61998]: INFO nova.compute.manager [None req-a66cd1d1-61ab-4d5f-ace6-7849693f7a98 tempest-ServerDiagnosticsV248Test-2030743529 tempest-ServerDiagnosticsV248Test-2030743529-project-admin] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Retrieving diagnostics [ 537.066012] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a3467b-ee49-4e48-a444-20003c9f2dc9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.103824] env[61998]: DEBUG nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 537.133580] env[61998]: DEBUG nova.virt.hardware [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 537.133848] env[61998]: DEBUG nova.virt.hardware [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 537.134010] env[61998]: DEBUG nova.virt.hardware [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 537.134349] env[61998]: DEBUG nova.virt.hardware [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 537.134349] env[61998]: DEBUG nova.virt.hardware [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 537.134502] env[61998]: DEBUG nova.virt.hardware [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 537.134737] env[61998]: DEBUG nova.virt.hardware [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 537.134829] env[61998]: DEBUG nova.virt.hardware [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 537.134981] env[61998]: DEBUG nova.virt.hardware [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 537.135739] env[61998]: DEBUG nova.virt.hardware [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 537.135965] env[61998]: DEBUG nova.virt.hardware [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 537.136876] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e76329-b276-42b6-b80f-cc90d7cb7e1a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.140327] env[61998]: DEBUG nova.network.neutron [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Successfully created port: c8e229b1-2459-48bd-81ff-487cca8a8b31 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 537.148282] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79ad506-db59-4aa1-8b82-43716c991b3d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.343253] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Releasing lock "refresh_cache-4cafaebd-b577-4a7e-92b8-151445f66a0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.343253] env[61998]: DEBUG nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 537.343253] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 537.343253] env[61998]: DEBUG oslo_concurrency.lockutils [req-6b336009-5452-4edf-8690-f10e6e853c1e req-a2d72fb5-c4f6-46ff-8d61-6de90c900029 service nova] Acquired lock "refresh_cache-4cafaebd-b577-4a7e-92b8-151445f66a0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.343253] env[61998]: DEBUG nova.network.neutron [req-6b336009-5452-4edf-8690-f10e6e853c1e req-a2d72fb5-c4f6-46ff-8d61-6de90c900029 service nova] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Refreshing network info cache for port 237ef56f-98dd-4c68-8be3-7f0387c010ff {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 537.343441] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-491918eb-a3cf-4a4a-926b-7783784707d5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.354835] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7619160b-ae0b-4dbc-8058-c179610383fe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.381577] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4cafaebd-b577-4a7e-92b8-151445f66a0d could not be found. [ 537.381882] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 537.382430] env[61998]: INFO nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 537.382430] env[61998]: DEBUG oslo.service.loopingcall [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 537.382652] env[61998]: DEBUG nova.compute.manager [-] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 537.382762] env[61998]: DEBUG nova.network.neutron [-] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 537.423118] env[61998]: DEBUG nova.network.neutron [-] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 537.428054] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.428054] env[61998]: DEBUG nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 537.432059] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.352s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.433548] env[61998]: INFO nova.compute.claims [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 537.908943] env[61998]: DEBUG nova.network.neutron [req-6b336009-5452-4edf-8690-f10e6e853c1e req-a2d72fb5-c4f6-46ff-8d61-6de90c900029 service nova] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 537.927935] env[61998]: DEBUG nova.network.neutron [-] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.939522] env[61998]: DEBUG nova.compute.utils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 537.945654] env[61998]: DEBUG nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 537.946399] env[61998]: DEBUG nova.network.neutron [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 537.986112] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "ef129347-9ea0-4615-b897-f51e664da1a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.986608] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "ef129347-9ea0-4615-b897-f51e664da1a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.049362] env[61998]: DEBUG nova.policy [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f216a76b65a745d79dbb0fdc97b6b31c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e57fb074f4004827b9b503aa277209c2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 538.088040] env[61998]: DEBUG nova.network.neutron [req-6b336009-5452-4edf-8690-f10e6e853c1e req-a2d72fb5-c4f6-46ff-8d61-6de90c900029 service nova] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.279843] env[61998]: DEBUG nova.compute.manager 
[req-5c8f3b90-8113-42d4-845f-da376f0bcd3a req-6ec36635-657c-4447-9a7e-c1f54475dc34 service nova] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Received event network-vif-deleted-180ea470-060a-4016-b570-1e5bf3515605 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 538.433030] env[61998]: INFO nova.compute.manager [-] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Took 1.05 seconds to deallocate network for instance. [ 538.436774] env[61998]: DEBUG nova.compute.claims [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 538.437388] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.448040] env[61998]: DEBUG nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 538.482020] env[61998]: ERROR nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6c9256dc-277c-45e9-aac5-1754cf36080b, please check neutron logs for more information. 
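The PortBindingFailed above is raised deep in Nova's Neutron client code; the traceback that follows records the full chain from _allocate_network_async down to _ensure_no_port_binding_failure. As a rough sketch of that final check, reconstructed only from the frames visible below (the guard on the port's binding:vif_type field and the marker value are assumptions; the log confirms only the raise at neutron.py:294):

    # Hypothetical reconstruction of nova.network.neutron._ensure_no_port_binding_failure.
    # Only the raise is confirmed by the traceback below; the 'binding:vif_type'
    # comparison and the constant's value are assumptions for illustration.
    from nova import exception

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed marker value reported by Neutron

    def _ensure_no_port_binding_failure(port):
        # Neutron signals a failed binding on the port itself; Nova converts it
        # into a typed exception carrying the offending port id, which is the id
        # quoted in the ERROR lines above and below.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise exception.PortBindingFailed(port_id=port['id'])

Per the frames below, _update_port() runs this check after every port update (neutron.py:585), and save_and_reraise_exception() in _update_ports_for_instance propagates the error up through allocate_for_instance() into _allocate_network_async, which is exactly the sequence the traceback records.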
[ 538.482020] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 538.482020] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 538.482020] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 538.482020] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 538.482020] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 538.482020] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 538.482020] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 538.482020] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.482020] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 538.482020] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.482020] env[61998]: ERROR nova.compute.manager raise self.value [ 538.482020] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 538.482020] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 538.482020] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.482020] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 538.482496] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 538.482496] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 538.482496] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6c9256dc-277c-45e9-aac5-1754cf36080b, please check neutron logs for more information. 
[ 538.482496] env[61998]: ERROR nova.compute.manager [ 538.482496] env[61998]: Traceback (most recent call last): [ 538.482496] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 538.482496] env[61998]: listener.cb(fileno) [ 538.482496] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 538.482496] env[61998]: result = function(*args, **kwargs) [ 538.482496] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 538.482496] env[61998]: return func(*args, **kwargs) [ 538.482496] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 538.482496] env[61998]: raise e [ 538.482496] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 538.482496] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 538.482496] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 538.482496] env[61998]: created_port_ids = self._update_ports_for_instance( [ 538.482496] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 538.482496] env[61998]: with excutils.save_and_reraise_exception(): [ 538.482496] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.482496] env[61998]: self.force_reraise() [ 538.482496] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.482496] env[61998]: raise self.value [ 538.482496] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 538.482496] env[61998]: updated_port = self._update_port( [ 538.482496] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.482496] env[61998]: _ensure_no_port_binding_failure(port) [ 538.482496] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 538.482496] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 538.483336] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 6c9256dc-277c-45e9-aac5-1754cf36080b, please check neutron logs for more information. [ 538.483336] env[61998]: Removing descriptor: 15 [ 538.483336] env[61998]: ERROR nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6c9256dc-277c-45e9-aac5-1754cf36080b, please check neutron logs for more information. 
[ 538.483336] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Traceback (most recent call last): [ 538.483336] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 538.483336] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] yield resources [ 538.483336] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 538.483336] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] self.driver.spawn(context, instance, image_meta, [ 538.483336] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 538.483336] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] self._vmops.spawn(context, instance, image_meta, injected_files, [ 538.483336] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 538.483336] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] vm_ref = self.build_virtual_machine(instance, [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] vif_infos = vmwarevif.get_vif_info(self._session, [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] for vif in network_info: [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] return self._sync_wrapper(fn, *args, **kwargs) [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] self.wait() [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] self[:] = self._gt.wait() [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] return self._exit_event.wait() [ 538.483702] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 538.484046] env[61998]: ERROR 
nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] result = hub.switch() [ 538.484046] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 538.484046] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] return self.greenlet.switch() [ 538.484046] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 538.484046] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] result = function(*args, **kwargs) [ 538.484046] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 538.484046] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] return func(*args, **kwargs) [ 538.484046] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 538.484046] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] raise e [ 538.484046] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 538.484046] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] nwinfo = self.network_api.allocate_for_instance( [ 538.484046] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 538.484046] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] created_port_ids = self._update_ports_for_instance( [ 538.484417] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 538.484417] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] with excutils.save_and_reraise_exception(): [ 538.484417] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.484417] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] self.force_reraise() [ 538.484417] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.484417] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] raise self.value [ 538.484417] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 538.484417] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] updated_port = self._update_port( [ 538.484417] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.484417] 
env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] _ensure_no_port_binding_failure(port) [ 538.484417] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 538.484417] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] raise exception.PortBindingFailed(port_id=port['id']) [ 538.484758] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] nova.exception.PortBindingFailed: Binding failed for port 6c9256dc-277c-45e9-aac5-1754cf36080b, please check neutron logs for more information. [ 538.484758] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] [ 538.484758] env[61998]: INFO nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Terminating instance [ 538.489050] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquiring lock "refresh_cache-81bbe6ae-87b2-414b-a872-4e03285abf92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 538.489050] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquired lock "refresh_cache-81bbe6ae-87b2-414b-a872-4e03285abf92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 538.489050] env[61998]: DEBUG nova.network.neutron [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 538.595722] env[61998]: DEBUG oslo_concurrency.lockutils [req-6b336009-5452-4edf-8690-f10e6e853c1e req-a2d72fb5-c4f6-46ff-8d61-6de90c900029 service nova] Releasing lock "refresh_cache-4cafaebd-b577-4a7e-92b8-151445f66a0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 538.724606] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736eedd7-256c-4c70-93ea-fcb3b3e3fea7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.738329] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3cac73-7bbf-402b-9aee-cf391d990b03 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.783831] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483911ff-f540-4a8a-b043-5a6f0a801cbc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.790469] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef072a1-6ca9-4e6c-9ccf-c4a94885a34f {{(pid=61998) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.810444] env[61998]: DEBUG nova.compute.provider_tree [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 539.149912] env[61998]: DEBUG nova.network.neutron [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 539.151355] env[61998]: DEBUG nova.network.neutron [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Successfully created port: b2f2be33-0c12-458e-a815-2fa188123fbb {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 539.316308] env[61998]: DEBUG nova.scheduler.client.report [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 539.432282] env[61998]: DEBUG nova.network.neutron [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.460159] env[61998]: DEBUG nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 539.503978] env[61998]: DEBUG nova.virt.hardware [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 539.504117] env[61998]: DEBUG nova.virt.hardware [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 539.504334] env[61998]: DEBUG nova.virt.hardware [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 539.504613] env[61998]: DEBUG nova.virt.hardware [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 539.504812] env[61998]: DEBUG nova.virt.hardware [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 539.504902] env[61998]: DEBUG nova.virt.hardware [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 539.505623] env[61998]: DEBUG nova.virt.hardware [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 539.505623] env[61998]: DEBUG nova.virt.hardware [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 539.505623] env[61998]: DEBUG nova.virt.hardware [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 539.505623] env[61998]: DEBUG nova.virt.hardware [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 539.505623] env[61998]: DEBUG nova.virt.hardware [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 539.506732] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21eeb432-9bb9-4b1f-9c79-293d1a602e66 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.516089] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319bb67f-6de0-4230-9132-dc2116a87d08 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.820154] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.388s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.822084] env[61998]: DEBUG nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 539.825903] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.016s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.825997] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.828015] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61998) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 539.828795] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.083s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.833843] env[61998]: INFO nova.compute.claims [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 539.842478] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c487b7f-a177-4aab-a03e-e9058c5b5f74 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.856658] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20d8a92-c8d1-4843-8818-eb46b3f2f4e2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.877410] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020fd7c9-ae77-4bc8-82e0-c0fbfb1b53bd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.888517] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23fb309-cd08-4151-a225-3af3b5a895e4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.920471] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181409MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61998) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 539.920641] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.935861] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Releasing lock "refresh_cache-81bbe6ae-87b2-414b-a872-4e03285abf92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.937919] env[61998]: DEBUG nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 539.937919] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 539.937919] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a5af250-7952-4c98-8a58-0652b7a351cb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.950347] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce75b97-28d3-4f4d-bfa6-6cdaf5809e73 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.979640] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 81bbe6ae-87b2-414b-a872-4e03285abf92 could not be found. [ 539.979640] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 539.979754] env[61998]: INFO nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Took 0.04 seconds to destroy the instance on the hypervisor. [ 539.980734] env[61998]: DEBUG oslo.service.loopingcall [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 539.980734] env[61998]: DEBUG nova.compute.manager [-] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 539.980734] env[61998]: DEBUG nova.network.neutron [-] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 540.033029] env[61998]: DEBUG nova.network.neutron [-] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 540.253025] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Acquiring lock "59330fd4-c362-4593-824d-d40c00f3f5d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.253455] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Lock "59330fd4-c362-4593-824d-d40c00f3f5d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.344031] env[61998]: DEBUG nova.compute.utils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 540.344777] env[61998]: DEBUG nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 540.344777] env[61998]: DEBUG nova.network.neutron [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 540.457556] env[61998]: DEBUG nova.policy [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a6c574cf8b349639783160bda992717', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8a1e78f16934c5dbf5cf489f2b61c13', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 540.526629] env[61998]: DEBUG nova.compute.manager [req-6733fe0e-b59f-4260-bd2f-02ed90b02ab1 req-460e035b-db06-4ddc-8dd7-c226c4eb0a5e service nova] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Received event network-vif-deleted-237ef56f-98dd-4c68-8be3-7f0387c010ff {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 540.526872] env[61998]: DEBUG nova.compute.manager [req-6733fe0e-b59f-4260-bd2f-02ed90b02ab1 req-460e035b-db06-4ddc-8dd7-c226c4eb0a5e service nova] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Received event network-changed-6c9256dc-277c-45e9-aac5-1754cf36080b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 540.526942] env[61998]: DEBUG nova.compute.manager [req-6733fe0e-b59f-4260-bd2f-02ed90b02ab1 req-460e035b-db06-4ddc-8dd7-c226c4eb0a5e service nova] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Refreshing instance network info cache due to event network-changed-6c9256dc-277c-45e9-aac5-1754cf36080b. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 540.527166] env[61998]: DEBUG oslo_concurrency.lockutils [req-6733fe0e-b59f-4260-bd2f-02ed90b02ab1 req-460e035b-db06-4ddc-8dd7-c226c4eb0a5e service nova] Acquiring lock "refresh_cache-81bbe6ae-87b2-414b-a872-4e03285abf92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.527312] env[61998]: DEBUG oslo_concurrency.lockutils [req-6733fe0e-b59f-4260-bd2f-02ed90b02ab1 req-460e035b-db06-4ddc-8dd7-c226c4eb0a5e service nova] Acquired lock "refresh_cache-81bbe6ae-87b2-414b-a872-4e03285abf92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.527466] env[61998]: DEBUG nova.network.neutron [req-6733fe0e-b59f-4260-bd2f-02ed90b02ab1 req-460e035b-db06-4ddc-8dd7-c226c4eb0a5e service nova] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Refreshing network info cache for port 6c9256dc-277c-45e9-aac5-1754cf36080b {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 540.534921] env[61998]: DEBUG nova.network.neutron [-] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.848861] env[61998]: DEBUG nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 541.039247] env[61998]: INFO nova.compute.manager [-] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Took 1.06 seconds to deallocate network for instance. [ 541.040955] env[61998]: DEBUG nova.compute.claims [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 541.041170] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.072975] env[61998]: DEBUG nova.network.neutron [req-6733fe0e-b59f-4260-bd2f-02ed90b02ab1 req-460e035b-db06-4ddc-8dd7-c226c4eb0a5e service nova] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 541.094541] env[61998]: ERROR nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c8e229b1-2459-48bd-81ff-487cca8a8b31, please check neutron logs for more information. 
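The same binding failure now repeats for port c8e229b1-2459-48bd-81ff-487cca8a8b31 (full traceback below). The "Acquiring lock" / "acquired" / "released ... waited/held" bookkeeping that brackets these failures, for "compute_resources" as well as the "refresh_cache-*" locks, is emitted by oslo.concurrency's synchronized wrapper. A minimal sketch of the pattern with a placeholder body, assuming the decorator form and fair=True (the lock name and caller method names match the log; the bodies are illustrative, not Nova's implementation):

    # Sketch of the named-lock pattern behind the "compute_resources" DEBUG lines.
    # lockutils.synchronized serializes callers on an in-process named lock and
    # itself logs "Acquiring"/"acquired"/"released" with waited/held timings,
    # which is why blocked builds above report e.g. 'waited 6.352s' or 'waited 7.083s'.
    from oslo_concurrency import lockutils

    class ResourceTrackerSketch:
        @lockutils.synchronized('compute_resources', fair=True)  # fair=True is assumed
        def instance_claim(self, instance_uuid):
            # Runs with the shared lock held; concurrent builds queue here.
            return {'instance': instance_uuid, 'claimed': True}

        @lockutils.synchronized('compute_resources', fair=True)
        def abort_instance_claim(self, instance_uuid):
            # The failure path seen in this log: after PortBindingFailed the
            # manager re-acquires the same lock to return the claimed resources.
            return {'instance': instance_uuid, 'claimed': False}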
[ 541.094541] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 541.094541] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 541.094541] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 541.094541] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.094541] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 541.094541] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.094541] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 541.094541] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.094541] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 541.094541] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.094541] env[61998]: ERROR nova.compute.manager raise self.value [ 541.094541] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.094541] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 541.094541] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.094541] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 541.095066] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.095066] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 541.095066] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c8e229b1-2459-48bd-81ff-487cca8a8b31, please check neutron logs for more information. 
[ 541.095066] env[61998]: ERROR nova.compute.manager [ 541.095066] env[61998]: Traceback (most recent call last): [ 541.095066] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 541.095066] env[61998]: listener.cb(fileno) [ 541.095066] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.095066] env[61998]: result = function(*args, **kwargs) [ 541.095066] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 541.095066] env[61998]: return func(*args, **kwargs) [ 541.095066] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 541.095066] env[61998]: raise e [ 541.095066] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 541.095066] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 541.095066] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.095066] env[61998]: created_port_ids = self._update_ports_for_instance( [ 541.095066] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.095066] env[61998]: with excutils.save_and_reraise_exception(): [ 541.095066] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.095066] env[61998]: self.force_reraise() [ 541.095066] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.095066] env[61998]: raise self.value [ 541.095066] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.095066] env[61998]: updated_port = self._update_port( [ 541.095066] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.095066] env[61998]: _ensure_no_port_binding_failure(port) [ 541.095066] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.095066] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 541.095764] env[61998]: nova.exception.PortBindingFailed: Binding failed for port c8e229b1-2459-48bd-81ff-487cca8a8b31, please check neutron logs for more information. [ 541.095764] env[61998]: Removing descriptor: 17 [ 541.095764] env[61998]: ERROR nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c8e229b1-2459-48bd-81ff-487cca8a8b31, please check neutron logs for more information. 
[ 541.095764] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Traceback (most recent call last): [ 541.095764] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 541.095764] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] yield resources [ 541.095764] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 541.095764] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] self.driver.spawn(context, instance, image_meta, [ 541.095764] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 541.095764] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] self._vmops.spawn(context, instance, image_meta, injected_files, [ 541.095764] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 541.095764] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] vm_ref = self.build_virtual_machine(instance, [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] vif_infos = vmwarevif.get_vif_info(self._session, [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] for vif in network_info: [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] return self._sync_wrapper(fn, *args, **kwargs) [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] self.wait() [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] self[:] = self._gt.wait() [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] return self._exit_event.wait() [ 541.096080] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 541.096393] env[61998]: ERROR 
nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] result = hub.switch() [ 541.096393] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 541.096393] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] return self.greenlet.switch() [ 541.096393] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.096393] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] result = function(*args, **kwargs) [ 541.096393] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 541.096393] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] return func(*args, **kwargs) [ 541.096393] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 541.096393] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] raise e [ 541.096393] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 541.096393] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] nwinfo = self.network_api.allocate_for_instance( [ 541.096393] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.096393] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] created_port_ids = self._update_ports_for_instance( [ 541.096795] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.096795] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] with excutils.save_and_reraise_exception(): [ 541.096795] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.096795] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] self.force_reraise() [ 541.096795] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.096795] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] raise self.value [ 541.096795] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.096795] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] updated_port = self._update_port( [ 541.096795] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.096795] 
env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] _ensure_no_port_binding_failure(port) [ 541.096795] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.096795] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] raise exception.PortBindingFailed(port_id=port['id']) [ 541.097092] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] nova.exception.PortBindingFailed: Binding failed for port c8e229b1-2459-48bd-81ff-487cca8a8b31, please check neutron logs for more information. [ 541.097092] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] [ 541.097092] env[61998]: INFO nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Terminating instance [ 541.098441] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Acquiring lock "refresh_cache-687eaa4d-012a-4dd9-9033-16fad3650a56" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.098599] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Acquired lock "refresh_cache-687eaa4d-012a-4dd9-9033-16fad3650a56" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.098755] env[61998]: DEBUG nova.network.neutron [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 541.127124] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec85433-6ca7-4c85-a03a-ff956969c44d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.139488] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e486c04-6e16-42df-ac4e-5158cb644713 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.179856] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e8211d-c1ad-40c3-a294-74f6de94c287 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.187187] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a875f3b-90e8-48a3-a0f7-e0921660a1e3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.201143] env[61998]: DEBUG nova.compute.provider_tree [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 
tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 541.447785] env[61998]: DEBUG nova.network.neutron [req-6733fe0e-b59f-4260-bd2f-02ed90b02ab1 req-460e035b-db06-4ddc-8dd7-c226c4eb0a5e service nova] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.705935] env[61998]: DEBUG nova.scheduler.client.report [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 541.747355] env[61998]: DEBUG nova.compute.manager [req-5f721b77-ad2b-41b4-ba02-74a589327716 req-4bcba293-9de0-4168-9bed-4a060dfb36bd service nova] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Received event network-changed-c8e229b1-2459-48bd-81ff-487cca8a8b31 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 541.747647] env[61998]: DEBUG nova.compute.manager [req-5f721b77-ad2b-41b4-ba02-74a589327716 req-4bcba293-9de0-4168-9bed-4a060dfb36bd service nova] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Refreshing instance network info cache due to event network-changed-c8e229b1-2459-48bd-81ff-487cca8a8b31. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 541.747985] env[61998]: DEBUG oslo_concurrency.lockutils [req-5f721b77-ad2b-41b4-ba02-74a589327716 req-4bcba293-9de0-4168-9bed-4a060dfb36bd service nova] Acquiring lock "refresh_cache-687eaa4d-012a-4dd9-9033-16fad3650a56" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.793319] env[61998]: DEBUG nova.network.neutron [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 541.859790] env[61998]: DEBUG nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 541.896586] env[61998]: DEBUG nova.virt.hardware [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 541.896885] env[61998]: DEBUG nova.virt.hardware [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 541.897113] env[61998]: DEBUG nova.virt.hardware [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 541.897232] env[61998]: DEBUG nova.virt.hardware [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 541.897374] env[61998]: DEBUG nova.virt.hardware [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 541.897518] env[61998]: DEBUG nova.virt.hardware [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 541.897796] env[61998]: DEBUG nova.virt.hardware [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 541.897893] env[61998]: DEBUG nova.virt.hardware [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 541.898126] env[61998]: DEBUG nova.virt.hardware [None 
req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 541.898300] env[61998]: DEBUG nova.virt.hardware [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 541.898473] env[61998]: DEBUG nova.virt.hardware [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 541.899415] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406cc4ec-bc33-4396-973c-8341f96f6c6e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.907974] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542eeccd-eb82-4064-a634-b116bec2fba1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.951248] env[61998]: DEBUG oslo_concurrency.lockutils [req-6733fe0e-b59f-4260-bd2f-02ed90b02ab1 req-460e035b-db06-4ddc-8dd7-c226c4eb0a5e service nova] Releasing lock "refresh_cache-81bbe6ae-87b2-414b-a872-4e03285abf92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.951658] env[61998]: DEBUG nova.compute.manager [req-6733fe0e-b59f-4260-bd2f-02ed90b02ab1 req-460e035b-db06-4ddc-8dd7-c226c4eb0a5e service nova] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Received event network-vif-deleted-6c9256dc-277c-45e9-aac5-1754cf36080b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 541.974720] env[61998]: DEBUG nova.network.neutron [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.023331] env[61998]: DEBUG nova.network.neutron [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Successfully created port: 4dcbe70e-1499-4982-bf91-78ea40b01f17 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 542.211333] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.211980] env[61998]: DEBUG nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 
tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 542.221681] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.068s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.478477] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Releasing lock "refresh_cache-687eaa4d-012a-4dd9-9033-16fad3650a56" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.479418] env[61998]: DEBUG nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 542.479816] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 542.480687] env[61998]: DEBUG oslo_concurrency.lockutils [req-5f721b77-ad2b-41b4-ba02-74a589327716 req-4bcba293-9de0-4168-9bed-4a060dfb36bd service nova] Acquired lock "refresh_cache-687eaa4d-012a-4dd9-9033-16fad3650a56" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.480687] env[61998]: DEBUG nova.network.neutron [req-5f721b77-ad2b-41b4-ba02-74a589327716 req-4bcba293-9de0-4168-9bed-4a060dfb36bd service nova] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Refreshing network info cache for port c8e229b1-2459-48bd-81ff-487cca8a8b31 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 542.486946] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55cb3c47-d765-4a91-aba5-78ae1fb50b35 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.499937] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70c1c11-cc4e-4af4-a67d-f562054ad529 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.530992] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 687eaa4d-012a-4dd9-9033-16fad3650a56 could not be found. 
[ 542.531320] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 542.531503] env[61998]: INFO nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Took 0.05 seconds to destroy the instance on the hypervisor. [ 542.531750] env[61998]: DEBUG oslo.service.loopingcall [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 542.531962] env[61998]: DEBUG nova.compute.manager [-] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 542.533219] env[61998]: DEBUG nova.network.neutron [-] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 542.588365] env[61998]: DEBUG nova.network.neutron [-] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 542.726258] env[61998]: DEBUG nova.compute.utils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 542.735903] env[61998]: DEBUG nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 542.736211] env[61998]: DEBUG nova.network.neutron [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 542.899232] env[61998]: DEBUG nova.policy [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28744b94219543a59520038db02a1a4b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46a71ba06342454ebd5578ff646cea6a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 543.031184] env[61998]: DEBUG nova.network.neutron [req-5f721b77-ad2b-41b4-ba02-74a589327716 req-4bcba293-9de0-4168-9bed-4a060dfb36bd service nova] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 543.088790] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "df154c2a-3616-442d-abb0-83e68cf1141d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.089068] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "df154c2a-3616-442d-abb0-83e68cf1141d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.092143] env[61998]: DEBUG nova.network.neutron [-] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.094045] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bae09a-3f83-4221-b927-428e60efaf6a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.106315] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6ac591-79de-4d40-891d-51fdbee5ced9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.148023] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0c4bae-eea0-42f6-a3a8-667f7e17b24b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.155293] env[61998]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfbe8f0-c9d1-4815-923c-cd601b6ec096 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.173243] env[61998]: DEBUG nova.compute.provider_tree [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.228358] env[61998]: ERROR nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b2f2be33-0c12-458e-a815-2fa188123fbb, please check neutron logs for more information. [ 543.228358] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 543.228358] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 543.228358] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 543.228358] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.228358] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 543.228358] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.228358] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 543.228358] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.228358] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 543.228358] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.228358] env[61998]: ERROR nova.compute.manager raise self.value [ 543.228358] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 543.228358] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 543.228358] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.228358] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 543.228911] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.228911] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 543.228911] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b2f2be33-0c12-458e-a815-2fa188123fbb, please check neutron logs for more information. 
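The excutils frames that appear in every one of these tracebacks (__exit__ -> force_reraise -> raise self.value) are oslo's idiom for running cleanup without swallowing the original exception. Minimal usage, with hypothetical update_port/cleanup helpers:

    from oslo_utils import excutils

    def update_ports(ports):
        try:
            for port in ports:
                update_port(port)  # hypothetical helper that may raise
        except Exception:
            with excutils.save_and_reraise_exception():
                # The body runs first; the saved exception (here a
                # PortBindingFailed) is re-raised when the block exits,
                # producing the __exit__/force_reraise frames in the logs.
                cleanup(ports)  # hypothetical rollback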
[ 543.228911] env[61998]: ERROR nova.compute.manager [ 543.228911] env[61998]: Traceback (most recent call last): [ 543.228911] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 543.228911] env[61998]: listener.cb(fileno) [ 543.228911] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.228911] env[61998]: result = function(*args, **kwargs) [ 543.228911] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 543.228911] env[61998]: return func(*args, **kwargs) [ 543.228911] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 543.228911] env[61998]: raise e [ 543.228911] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 543.228911] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 543.228911] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.228911] env[61998]: created_port_ids = self._update_ports_for_instance( [ 543.228911] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.228911] env[61998]: with excutils.save_and_reraise_exception(): [ 543.228911] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.228911] env[61998]: self.force_reraise() [ 543.228911] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.228911] env[61998]: raise self.value [ 543.228911] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 543.228911] env[61998]: updated_port = self._update_port( [ 543.228911] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.228911] env[61998]: _ensure_no_port_binding_failure(port) [ 543.228911] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.228911] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 543.229662] env[61998]: nova.exception.PortBindingFailed: Binding failed for port b2f2be33-0c12-458e-a815-2fa188123fbb, please check neutron logs for more information. [ 543.229662] env[61998]: Removing descriptor: 19 [ 543.229662] env[61998]: ERROR nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b2f2be33-0c12-458e-a815-2fa188123fbb, please check neutron logs for more information. 
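Each failure is reported twice because network allocation is asynchronous: _allocate_network_async runs in an eventlet greenthread (the compact traceback above, surfacing from the hub), while the driver only hits the error when it first iterates network_info inside spawn (the per-instance traceback, via model.py's _sync_wrapper/wait). A sketch of that deferral pattern, not nova's actual NetworkInfoAsyncWrapper:

    import eventlet

    class AsyncNetworkInfo:
        """Defers to a background allocation; raises on first use."""

        def __init__(self, allocate_fn, *args):
            self._gt = eventlet.spawn(allocate_fn, *args)
            self._nw_info = None

        def _sync(self):
            if self._nw_info is None:
                # GreenThread.wait() re-raises whatever the allocation
                # raised, e.g. PortBindingFailed, at the caller's frame.
                self._nw_info = self._gt.wait()
            return self._nw_info

        def __iter__(self):
            # 'for vif in network_info' lands here, which is why the
            # exception surfaces inside get_vif_info in the tracebacks.
            return iter(self._sync())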
[ 543.229662] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Traceback (most recent call last): [ 543.229662] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 543.229662] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] yield resources [ 543.229662] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 543.229662] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] self.driver.spawn(context, instance, image_meta, [ 543.229662] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 543.229662] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 543.229662] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 543.229662] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] vm_ref = self.build_virtual_machine(instance, [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] vif_infos = vmwarevif.get_vif_info(self._session, [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] for vif in network_info: [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] return self._sync_wrapper(fn, *args, **kwargs) [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] self.wait() [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] self[:] = self._gt.wait() [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] return self._exit_event.wait() [ 543.230027] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 543.230476] env[61998]: ERROR 
nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] result = hub.switch() [ 543.230476] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 543.230476] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] return self.greenlet.switch() [ 543.230476] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.230476] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] result = function(*args, **kwargs) [ 543.230476] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 543.230476] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] return func(*args, **kwargs) [ 543.230476] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 543.230476] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] raise e [ 543.230476] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 543.230476] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] nwinfo = self.network_api.allocate_for_instance( [ 543.230476] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.230476] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] created_port_ids = self._update_ports_for_instance( [ 543.230863] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.230863] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] with excutils.save_and_reraise_exception(): [ 543.230863] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.230863] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] self.force_reraise() [ 543.230863] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.230863] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] raise self.value [ 543.230863] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 543.230863] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] updated_port = self._update_port( [ 543.230863] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.230863] 
env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] _ensure_no_port_binding_failure(port) [ 543.230863] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.230863] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] raise exception.PortBindingFailed(port_id=port['id']) [ 543.231203] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] nova.exception.PortBindingFailed: Binding failed for port b2f2be33-0c12-458e-a815-2fa188123fbb, please check neutron logs for more information. [ 543.231203] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] [ 543.231203] env[61998]: INFO nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Terminating instance [ 543.231718] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Acquiring lock "refresh_cache-d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.231718] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Acquired lock "refresh_cache-d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.233859] env[61998]: DEBUG nova.network.neutron [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 543.234020] env[61998]: DEBUG nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 543.253096] env[61998]: DEBUG nova.network.neutron [req-5f721b77-ad2b-41b4-ba02-74a589327716 req-4bcba293-9de0-4168-9bed-4a060dfb36bd service nova] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.601104] env[61998]: INFO nova.compute.manager [-] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Took 1.07 seconds to deallocate network for instance. 
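After the network is deallocated, the resource tracker aborts the claim taken for the failed instance (the "Aborting claim" entry below) so placement inventory is not leaked. The claim bookkeeping in this log is serialized by the single "compute_resources" lock, which is why abort_instance_claim above waited 9.068s while other builds held it. A rough sketch of the pattern under that assumption (hypothetical tracker; the lockutils decorator is the real oslo.concurrency API):

    from oslo_concurrency import lockutils

    class ResourceTracker:
        @lockutils.synchronized('compute_resources')
        def instance_claim(self, instance, resources):
            resources.reserve(instance)  # hypothetical bookkeeping

        @lockutils.synchronized('compute_resources')
        def abort_instance_claim(self, instance, resources):
            # Undo the reservation when the build fails; contention on
            # this one lock accounts for the long 'waited' times logged.
            resources.unreserve(instance)  # hypothetical bookkeeping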
[ 543.606340] env[61998]: DEBUG nova.compute.claims [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 543.606340] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.680420] env[61998]: DEBUG nova.scheduler.client.report [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 543.757285] env[61998]: DEBUG oslo_concurrency.lockutils [req-5f721b77-ad2b-41b4-ba02-74a589327716 req-4bcba293-9de0-4168-9bed-4a060dfb36bd service nova] Releasing lock "refresh_cache-687eaa4d-012a-4dd9-9033-16fad3650a56" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.801504] env[61998]: DEBUG nova.network.neutron [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 544.030645] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquiring lock "a8f6254f-b867-4967-b4fa-bb70f471f89d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.030876] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lock "a8f6254f-b867-4967-b4fa-bb70f471f89d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.090977] env[61998]: DEBUG nova.network.neutron [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.101446] env[61998]: DEBUG nova.network.neutron [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Successfully created port: ab247aa6-38cc-4ff4-bdeb-351ba048048e {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 544.188390] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.969s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.189135] env[61998]: ERROR nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 80bd11f6-156e-47d4-b4a6-925e658bfa06, please check neutron logs for more information. 
[ 544.189135] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Traceback (most recent call last): [ 544.189135] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 544.189135] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] self.driver.spawn(context, instance, image_meta, [ 544.189135] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 544.189135] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 544.189135] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 544.189135] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] vm_ref = self.build_virtual_machine(instance, [ 544.189135] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 544.189135] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] vif_infos = vmwarevif.get_vif_info(self._session, [ 544.189135] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] for vif in network_info: [ 544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] return self._sync_wrapper(fn, *args, **kwargs) [ 544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] self.wait() [ 544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] self[:] = self._gt.wait() [ 544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] return self._exit_event.wait() [ 544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] result = hub.switch() [ 544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
544.189481] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] return self.greenlet.switch() [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] result = function(*args, **kwargs) [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] return func(*args, **kwargs) [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] raise e [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] nwinfo = self.network_api.allocate_for_instance( [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] created_port_ids = self._update_ports_for_instance( [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] with excutils.save_and_reraise_exception(): [ 544.189837] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.190201] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] self.force_reraise() [ 544.190201] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.190201] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] raise self.value [ 544.190201] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 544.190201] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] updated_port = self._update_port( [ 544.190201] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.190201] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] _ensure_no_port_binding_failure(port) [ 544.190201] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 544.190201] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] raise exception.PortBindingFailed(port_id=port['id']) [ 544.190201] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] nova.exception.PortBindingFailed: Binding failed for port 80bd11f6-156e-47d4-b4a6-925e658bfa06, please check neutron logs for more information. [ 544.190201] env[61998]: ERROR nova.compute.manager [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] [ 544.190682] env[61998]: DEBUG nova.compute.utils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Binding failed for port 80bd11f6-156e-47d4-b4a6-925e658bfa06, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 544.191412] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.616s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.193830] env[61998]: INFO nova.compute.claims [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 544.197242] env[61998]: DEBUG nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Build of instance 2a24c5ce-9b52-49ae-b847-6280cef02eb5 was re-scheduled: Binding failed for port 80bd11f6-156e-47d4-b4a6-925e658bfa06, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 544.197645] env[61998]: DEBUG nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 544.198153] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquiring lock "refresh_cache-2a24c5ce-9b52-49ae-b847-6280cef02eb5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.198153] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquired lock "refresh_cache-2a24c5ce-9b52-49ae-b847-6280cef02eb5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.198269] env[61998]: DEBUG nova.network.neutron [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 544.251719] env[61998]: DEBUG nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 544.279052] env[61998]: DEBUG nova.virt.hardware [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 544.279308] env[61998]: DEBUG nova.virt.hardware [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 544.279469] env[61998]: DEBUG nova.virt.hardware [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 544.279652] env[61998]: DEBUG nova.virt.hardware [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 544.279979] env[61998]: DEBUG nova.virt.hardware [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 544.279979] env[61998]: DEBUG nova.virt.hardware [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 544.280806] env[61998]: DEBUG nova.virt.hardware [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 544.281010] env[61998]: DEBUG nova.virt.hardware [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 544.281219] env[61998]: DEBUG nova.virt.hardware [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 544.281460] env[61998]: DEBUG nova.virt.hardware [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 544.281661] env[61998]: DEBUG nova.virt.hardware [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 544.282540] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a6af74-0b1c-4124-ad48-6ff6d66bbc63 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.291364] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ece395-7923-4a55-9843-40e20c415cbb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.593775] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Releasing lock "refresh_cache-d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.594303] env[61998]: DEBUG nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 544.594493] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 544.594799] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-20c595e9-cd26-4ffd-b47a-6e5f968616e6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.606274] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08815824-5929-49d7-a161-6553d4e701d3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.637835] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f could not be found. [ 544.638130] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 544.638333] env[61998]: INFO nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 544.638577] env[61998]: DEBUG oslo.service.loopingcall [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 544.640664] env[61998]: DEBUG nova.compute.manager [-] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 544.640912] env[61998]: DEBUG nova.network.neutron [-] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 544.675150] env[61998]: DEBUG nova.network.neutron [-] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 544.732526] env[61998]: DEBUG nova.network.neutron [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 544.899248] env[61998]: DEBUG nova.network.neutron [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.985948] env[61998]: DEBUG nova.compute.manager [req-c00de441-10ce-4419-b84e-483a84166bae req-43b6e5f9-bfef-4c9e-a270-267ab272c91b service nova] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Received event network-vif-deleted-c8e229b1-2459-48bd-81ff-487cca8a8b31 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 544.989401] env[61998]: DEBUG nova.compute.manager [req-c00de441-10ce-4419-b84e-483a84166bae req-43b6e5f9-bfef-4c9e-a270-267ab272c91b service nova] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Received event network-changed-b2f2be33-0c12-458e-a815-2fa188123fbb {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 544.989401] env[61998]: DEBUG nova.compute.manager [req-c00de441-10ce-4419-b84e-483a84166bae req-43b6e5f9-bfef-4c9e-a270-267ab272c91b service nova] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Refreshing instance network info cache due to event network-changed-b2f2be33-0c12-458e-a815-2fa188123fbb. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 544.989401] env[61998]: DEBUG oslo_concurrency.lockutils [req-c00de441-10ce-4419-b84e-483a84166bae req-43b6e5f9-bfef-4c9e-a270-267ab272c91b service nova] Acquiring lock "refresh_cache-d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.989401] env[61998]: DEBUG oslo_concurrency.lockutils [req-c00de441-10ce-4419-b84e-483a84166bae req-43b6e5f9-bfef-4c9e-a270-267ab272c91b service nova] Acquired lock "refresh_cache-d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.989401] env[61998]: DEBUG nova.network.neutron [req-c00de441-10ce-4419-b84e-483a84166bae req-43b6e5f9-bfef-4c9e-a270-267ab272c91b service nova] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Refreshing network info cache for port b2f2be33-0c12-458e-a815-2fa188123fbb {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 545.088913] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Acquiring lock "ad2f23df-c067-4d30-b143-e50ebcc50d4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.089312] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Lock "ad2f23df-c067-4d30-b143-e50ebcc50d4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.179042] env[61998]: DEBUG nova.network.neutron [-] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.393894] env[61998]: ERROR nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4dcbe70e-1499-4982-bf91-78ea40b01f17, please check neutron logs for more information. [ 545.393894] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 545.393894] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 545.393894] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 545.393894] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 545.393894] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 545.393894] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 545.393894] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 545.393894] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.393894] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 545.393894] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.393894] env[61998]: ERROR nova.compute.manager raise self.value [ 545.393894] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 545.393894] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 545.393894] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.393894] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 545.394408] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.394408] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 545.394408] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4dcbe70e-1499-4982-bf91-78ea40b01f17, please check neutron logs for more information. 
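
The same PortBindingFailed is logged twice more below: first as a raw greenthread traceback from eventlet's hub (the frames starting at hubs/poll.py and ending in "Removing descriptor"), then again by the compute manager as "Instance failed to spawn". That duplication falls out of how nova allocates networking in a background greenthread and re-raises the stored exception when the spawn path finally waits on it (the self._gt.wait() frames above). A minimal sketch of that propagation, assuming only eventlet is installed; allocate_network is a hypothetical stand-in for nova's _allocate_network_async body:

import eventlet

def allocate_network():
    # Stand-in for the greenthread body that fails in the tracebacks above.
    raise RuntimeError('binding failed')

gt = eventlet.spawn(allocate_network)
try:
    # Yielding to the hub runs the greenthread: eventlet prints the raw
    # traceback to stderr (the hubs/poll.py frames), and wait() then
    # re-raises the same exception here, where it gets logged again.
    gt.wait()
except RuntimeError as exc:
    print('caught in spawn path:', exc)
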
[ 545.394408] env[61998]: ERROR nova.compute.manager [ 545.394408] env[61998]: Traceback (most recent call last): [ 545.394408] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 545.394408] env[61998]: listener.cb(fileno) [ 545.394408] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.394408] env[61998]: result = function(*args, **kwargs) [ 545.394408] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 545.394408] env[61998]: return func(*args, **kwargs) [ 545.394408] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 545.394408] env[61998]: raise e [ 545.394408] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 545.394408] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 545.394408] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 545.394408] env[61998]: created_port_ids = self._update_ports_for_instance( [ 545.394408] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 545.394408] env[61998]: with excutils.save_and_reraise_exception(): [ 545.394408] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.394408] env[61998]: self.force_reraise() [ 545.394408] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.394408] env[61998]: raise self.value [ 545.394408] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 545.394408] env[61998]: updated_port = self._update_port( [ 545.394408] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.394408] env[61998]: _ensure_no_port_binding_failure(port) [ 545.394408] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.394408] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 545.395138] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 4dcbe70e-1499-4982-bf91-78ea40b01f17, please check neutron logs for more information. [ 545.395138] env[61998]: Removing descriptor: 15 [ 545.395138] env[61998]: ERROR nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4dcbe70e-1499-4982-bf91-78ea40b01f17, please check neutron logs for more information. 
[ 545.395138] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Traceback (most recent call last): [ 545.395138] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 545.395138] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] yield resources [ 545.395138] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 545.395138] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] self.driver.spawn(context, instance, image_meta, [ 545.395138] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 545.395138] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 545.395138] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 545.395138] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] vm_ref = self.build_virtual_machine(instance, [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] vif_infos = vmwarevif.get_vif_info(self._session, [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] for vif in network_info: [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] return self._sync_wrapper(fn, *args, **kwargs) [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] self.wait() [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] self[:] = self._gt.wait() [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] return self._exit_event.wait() [ 545.395504] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 545.395815] env[61998]: ERROR 
nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] result = hub.switch() [ 545.395815] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 545.395815] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] return self.greenlet.switch() [ 545.395815] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.395815] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] result = function(*args, **kwargs) [ 545.395815] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 545.395815] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] return func(*args, **kwargs) [ 545.395815] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 545.395815] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] raise e [ 545.395815] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 545.395815] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] nwinfo = self.network_api.allocate_for_instance( [ 545.395815] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 545.395815] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] created_port_ids = self._update_ports_for_instance( [ 545.396143] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 545.396143] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] with excutils.save_and_reraise_exception(): [ 545.396143] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.396143] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] self.force_reraise() [ 545.396143] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.396143] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] raise self.value [ 545.396143] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 545.396143] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] updated_port = self._update_port( [ 545.396143] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.396143] 
env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] _ensure_no_port_binding_failure(port) [ 545.396143] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.396143] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] raise exception.PortBindingFailed(port_id=port['id']) [ 545.396433] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] nova.exception.PortBindingFailed: Binding failed for port 4dcbe70e-1499-4982-bf91-78ea40b01f17, please check neutron logs for more information. [ 545.396433] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] [ 545.396433] env[61998]: INFO nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Terminating instance [ 545.400358] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Acquiring lock "refresh_cache-f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.400358] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Acquired lock "refresh_cache-f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.401477] env[61998]: DEBUG nova.network.neutron [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 545.405101] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Releasing lock "refresh_cache-2a24c5ce-9b52-49ae-b847-6280cef02eb5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.405101] env[61998]: DEBUG nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 545.405101] env[61998]: DEBUG nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 545.405101] env[61998]: DEBUG nova.network.neutron [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 545.446487] env[61998]: DEBUG nova.network.neutron [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 545.516038] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af455a7-cce8-45eb-b610-050374920269 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.527471] env[61998]: DEBUG nova.network.neutron [req-c00de441-10ce-4419-b84e-483a84166bae req-43b6e5f9-bfef-4c9e-a270-267ab272c91b service nova] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 545.533105] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013335f6-efcd-4a72-a518-c0c4da831c7c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.570329] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff34c5a-7d30-49af-83a1-c0d5e12629df {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.577970] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40694156-0602-49ac-ac24-26e190a8c038 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.591628] env[61998]: DEBUG nova.compute.provider_tree [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.683842] env[61998]: INFO nova.compute.manager [-] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Took 1.04 seconds to deallocate network for instance. 
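
The "with excutils.save_and_reraise_exception():" frames that appear in every traceback above are oslo.utils' standard rollback idiom: run cleanup code inside an except handler without losing the original exception, which force_reraise() then re-raises on exit. A minimal, self-contained sketch of the idiom; update_ports_for_instance and its rollback body are hypothetical stand-ins for the port cleanup nova performs in _update_ports_for_instance:

from oslo_utils import excutils

def update_ports_for_instance(ports):
    created_port_ids = []
    try:
        for port in ports:
            created_port_ids.append(port['id'])
            raise RuntimeError('binding failed')  # simulate a neutron error
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup runs here; when the with-block exits, the saved
            # exception is re-raised automatically, so callers still see
            # the original failure rather than a cleanup error.
            for port_id in created_port_ids:
                print('rolling back port', port_id)

try:
    update_ports_for_instance([{'id': '80bd11f6-156e-47d4-b4a6-925e658bfa06'}])
except RuntimeError as exc:
    print('original error survives cleanup:', exc)
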
[ 545.687030] env[61998]: DEBUG nova.compute.claims [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 545.687212] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.809545] env[61998]: DEBUG nova.network.neutron [req-c00de441-10ce-4419-b84e-483a84166bae req-43b6e5f9-bfef-4c9e-a270-267ab272c91b service nova] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.953023] env[61998]: DEBUG nova.network.neutron [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.955864] env[61998]: DEBUG nova.network.neutron [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 546.098632] env[61998]: DEBUG nova.scheduler.client.report [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 546.156426] env[61998]: DEBUG nova.network.neutron [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.190437] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Acquiring lock "e37ac276-8a3e-45b3-8176-d972eb4e6e3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.190726] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Lock "e37ac276-8a3e-45b3-8176-d972eb4e6e3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.313203] env[61998]: DEBUG oslo_concurrency.lockutils [req-c00de441-10ce-4419-b84e-483a84166bae req-43b6e5f9-bfef-4c9e-a270-267ab272c91b service nova] Releasing lock "refresh_cache-d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.313474] env[61998]: DEBUG nova.compute.manager [req-c00de441-10ce-4419-b84e-483a84166bae req-43b6e5f9-bfef-4c9e-a270-267ab272c91b service nova] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Received event network-vif-deleted-b2f2be33-0c12-458e-a815-2fa188123fbb {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 546.460086] env[61998]: INFO nova.compute.manager [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] Took 1.05 seconds to deallocate network for instance. 
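
The "Inventory has not changed" record above also documents the provider's capacity model: for each resource class, placement treats (total - reserved) * allocation_ratio as the schedulable capacity, with max_unit capping any single allocation. A small sketch applying that formula to the logged figures (the dict is copied from the log record):

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} schedulable, at most {inv['max_unit']} per allocation")

So the 4.0 VCPU allocation ratio yields 192 schedulable vCPUs against 48 physical ones, while memory and disk are not overcommitted at all.
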
[ 546.611368] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.418s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.611368] env[61998]: DEBUG nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 546.619218] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.726s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.665213] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Releasing lock "refresh_cache-f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.665633] env[61998]: DEBUG nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 546.666288] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 546.666864] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-12d43a01-ca5a-407f-86b3-a319fa4f7978 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.676212] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4676f686-9865-4049-817b-b77901f81098 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.699933] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e could not be found. 
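
Two different oslo.concurrency idioms are visible in these lock lines. The "acquired by ... waited Ns" / "released ... held Ns" pairs (compute_resources above: waited 10.726s, held 2.418s) come from the lockutils.synchronized decorator, whose inner wrapper times the critical section; the plain Acquiring/Acquired/Releasing triples around the refresh_cache-* locks come from the lockutils.lock context manager. A minimal sketch of both, with lock names mirroring the log and illustrative bodies:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim():
    # Runs under the process-local 'compute_resources' lock; the wrapper
    # logs how long the caller waited for the lock and, on release, how
    # long it was held.
    pass

def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # Emits the Acquiring/Acquired/Releasing debug lines seen above.
        pass
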
[ 546.700231] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 546.700401] env[61998]: INFO nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 546.700567] env[61998]: DEBUG oslo.service.loopingcall [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 546.702405] env[61998]: DEBUG nova.compute.manager [-] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 546.702405] env[61998]: DEBUG nova.network.neutron [-] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 546.731154] env[61998]: DEBUG nova.network.neutron [-] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 546.751762] env[61998]: DEBUG nova.compute.manager [req-400b653d-2c68-4a77-b124-fefb2d1bb71b req-5cf595de-8ceb-4bb7-945f-0cc246fdb48c service nova] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Received event network-changed-4dcbe70e-1499-4982-bf91-78ea40b01f17 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 546.752219] env[61998]: DEBUG nova.compute.manager [req-400b653d-2c68-4a77-b124-fefb2d1bb71b req-5cf595de-8ceb-4bb7-945f-0cc246fdb48c service nova] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Refreshing instance network info cache due to event network-changed-4dcbe70e-1499-4982-bf91-78ea40b01f17. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 546.752219] env[61998]: DEBUG oslo_concurrency.lockutils [req-400b653d-2c68-4a77-b124-fefb2d1bb71b req-5cf595de-8ceb-4bb7-945f-0cc246fdb48c service nova] Acquiring lock "refresh_cache-f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.752331] env[61998]: DEBUG oslo_concurrency.lockutils [req-400b653d-2c68-4a77-b124-fefb2d1bb71b req-5cf595de-8ceb-4bb7-945f-0cc246fdb48c service nova] Acquired lock "refresh_cache-f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.752468] env[61998]: DEBUG nova.network.neutron [req-400b653d-2c68-4a77-b124-fefb2d1bb71b req-5cf595de-8ceb-4bb7-945f-0cc246fdb48c service nova] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Refreshing network info cache for port 4dcbe70e-1499-4982-bf91-78ea40b01f17 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 547.094158] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquiring lock "975b0c65-6f57-4c7c-ae46-b23920a039f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.094560] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Lock "975b0c65-6f57-4c7c-ae46-b23920a039f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.118840] env[61998]: DEBUG nova.compute.utils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 547.124790] env[61998]: DEBUG nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 547.124790] env[61998]: DEBUG nova.network.neutron [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 547.235390] env[61998]: DEBUG nova.network.neutron [-] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.244656] env[61998]: DEBUG nova.policy [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '270493996eaa4eb8b25b9f78f104c05e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97c048d8631543ef99ec5fc8c2a67aee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 547.306135] env[61998]: DEBUG nova.network.neutron [req-400b653d-2c68-4a77-b124-fefb2d1bb71b req-5cf595de-8ceb-4bb7-945f-0cc246fdb48c service nova] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 547.310345] env[61998]: ERROR nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ab247aa6-38cc-4ff4-bdeb-351ba048048e, please check neutron logs for more information. 
[ 547.310345] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 547.310345] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 547.310345] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 547.310345] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.310345] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 547.310345] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.310345] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 547.310345] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.310345] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 547.310345] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.310345] env[61998]: ERROR nova.compute.manager raise self.value [ 547.310345] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.310345] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 547.310345] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.310345] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 547.310845] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.310845] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 547.310845] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ab247aa6-38cc-4ff4-bdeb-351ba048048e, please check neutron logs for more information. 
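
All of these tracebacks terminate in the same short check at nova/network/neutron.py line 294: after updating a port, nova inspects the binding:vif_type that neutron handed back and refuses to proceed if neutron marked the binding as failed. A simplified sketch of that check; the exception class here is a stand-in for nova.exception.PortBindingFailed:

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)

def _ensure_no_port_binding_failure(port):
    # Neutron signals a failed binding by setting the port's
    # binding:vif_type to the sentinel string 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

try:
    _ensure_no_port_binding_failure(
        {'id': 'ab247aa6-38cc-4ff4-bdeb-351ba048048e',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)  # prints the familiar "Binding failed for port ..." message
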
[ 547.310845] env[61998]: ERROR nova.compute.manager [ 547.310845] env[61998]: Traceback (most recent call last): [ 547.311039] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 547.311039] env[61998]: listener.cb(fileno) [ 547.311039] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.311039] env[61998]: result = function(*args, **kwargs) [ 547.311039] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 547.311039] env[61998]: return func(*args, **kwargs) [ 547.311039] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 547.311039] env[61998]: raise e [ 547.311039] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 547.311039] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 547.311039] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.311039] env[61998]: created_port_ids = self._update_ports_for_instance( [ 547.311039] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.311039] env[61998]: with excutils.save_and_reraise_exception(): [ 547.311039] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.311039] env[61998]: self.force_reraise() [ 547.311039] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.311039] env[61998]: raise self.value [ 547.311039] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.311039] env[61998]: updated_port = self._update_port( [ 547.311039] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.311039] env[61998]: _ensure_no_port_binding_failure(port) [ 547.311039] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.311039] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 547.311675] env[61998]: nova.exception.PortBindingFailed: Binding failed for port ab247aa6-38cc-4ff4-bdeb-351ba048048e, please check neutron logs for more information. [ 547.311675] env[61998]: Removing descriptor: 16 [ 547.312872] env[61998]: ERROR nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ab247aa6-38cc-4ff4-bdeb-351ba048048e, please check neutron logs for more information. 
[ 547.312872] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Traceback (most recent call last): [ 547.312872] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 547.312872] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] yield resources [ 547.312872] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 547.312872] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] self.driver.spawn(context, instance, image_meta, [ 547.312872] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 547.312872] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 547.312872] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 547.312872] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] vm_ref = self.build_virtual_machine(instance, [ 547.312872] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 547.313209] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] vif_infos = vmwarevif.get_vif_info(self._session, [ 547.313209] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 547.313209] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] for vif in network_info: [ 547.313209] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 547.313209] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] return self._sync_wrapper(fn, *args, **kwargs) [ 547.313209] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 547.313209] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] self.wait() [ 547.313209] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 547.313209] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] self[:] = self._gt.wait() [ 547.313209] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 547.313209] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] return self._exit_event.wait() [ 547.313209] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 547.313209] env[61998]: ERROR 
nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] result = hub.switch() [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] return self.greenlet.switch() [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] result = function(*args, **kwargs) [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] return func(*args, **kwargs) [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] raise e [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] nwinfo = self.network_api.allocate_for_instance( [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] created_port_ids = self._update_ports_for_instance( [ 547.313560] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.313967] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] with excutils.save_and_reraise_exception(): [ 547.313967] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.313967] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] self.force_reraise() [ 547.313967] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.313967] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] raise self.value [ 547.313967] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.313967] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] updated_port = self._update_port( [ 547.313967] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.313967] 
env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] _ensure_no_port_binding_failure(port) [ 547.313967] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.313967] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] raise exception.PortBindingFailed(port_id=port['id']) [ 547.313967] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] nova.exception.PortBindingFailed: Binding failed for port ab247aa6-38cc-4ff4-bdeb-351ba048048e, please check neutron logs for more information. [ 547.313967] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] [ 547.314343] env[61998]: INFO nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Terminating instance [ 547.320732] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Acquiring lock "refresh_cache-782f1eba-459c-4249-b74a-128b22c64ca3" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.320732] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Acquired lock "refresh_cache-782f1eba-459c-4249-b74a-128b22c64ca3" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.320732] env[61998]: DEBUG nova.network.neutron [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 547.501808] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efcc908f-9df5-4131-a48d-345e6a3d4912 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.505379] env[61998]: INFO nova.scheduler.client.report [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Deleted allocations for instance 2a24c5ce-9b52-49ae-b847-6280cef02eb5 [ 547.519743] env[61998]: DEBUG nova.network.neutron [req-400b653d-2c68-4a77-b124-fefb2d1bb71b req-5cf595de-8ceb-4bb7-945f-0cc246fdb48c service nova] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.524656] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4039461-3822-45e9-b01e-c0baf3ee2045 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.560123] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f71509a4-d8d6-415c-8830-47311ec5476c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.568440] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af50779-0829-485e-9d8c-ecacf71c7d94 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.585121] env[61998]: DEBUG nova.compute.provider_tree [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.627563] env[61998]: DEBUG nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 547.738156] env[61998]: INFO nova.compute.manager [-] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Took 1.04 seconds to deallocate network for instance. [ 547.742221] env[61998]: DEBUG nova.compute.claims [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 547.742221] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.849134] env[61998]: DEBUG nova.network.neutron [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 547.907270] env[61998]: DEBUG nova.network.neutron [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Successfully created port: 583592a8-3db1-4ec9-9a9f-79479d91196d {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 548.022730] env[61998]: DEBUG oslo_concurrency.lockutils [None req-92e6cd6e-9e30-40f4-a888-c4c19bae20fb tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Lock "2a24c5ce-9b52-49ae-b847-6280cef02eb5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 29.911s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.025211] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "2a24c5ce-9b52-49ae-b847-6280cef02eb5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 16.721s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.025211] env[61998]: INFO nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 2a24c5ce-9b52-49ae-b847-6280cef02eb5] During sync_power_state the instance has a pending task (spawning). Skip. [ 548.025211] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "2a24c5ce-9b52-49ae-b847-6280cef02eb5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.025349] env[61998]: DEBUG oslo_concurrency.lockutils [req-400b653d-2c68-4a77-b124-fefb2d1bb71b req-5cf595de-8ceb-4bb7-945f-0cc246fdb48c service nova] Releasing lock "refresh_cache-f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.088494] env[61998]: DEBUG nova.scheduler.client.report [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 548.137529] env[61998]: DEBUG nova.network.neutron [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.530022] 
env[61998]: DEBUG nova.compute.manager [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 548.596105] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.979s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.596755] env[61998]: ERROR nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 180ea470-060a-4016-b570-1e5bf3515605, please check neutron logs for more information. [ 548.596755] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Traceback (most recent call last): [ 548.596755] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 548.596755] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] self.driver.spawn(context, instance, image_meta, [ 548.596755] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 548.596755] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] self._vmops.spawn(context, instance, image_meta, injected_files, [ 548.596755] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 548.596755] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] vm_ref = self.build_virtual_machine(instance, [ 548.596755] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 548.596755] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] vif_infos = vmwarevif.get_vif_info(self._session, [ 548.596755] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 548.597134] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] for vif in network_info: [ 548.597134] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 548.597134] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] return self._sync_wrapper(fn, *args, **kwargs) [ 548.597134] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 548.597134] env[61998]: ERROR 
nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] self.wait() [ 548.597134] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 548.597134] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] self[:] = self._gt.wait() [ 548.597134] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 548.597134] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] return self._exit_event.wait() [ 548.597134] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 548.597134] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] result = hub.switch() [ 548.597134] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 548.597134] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] return self.greenlet.switch() [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] result = function(*args, **kwargs) [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] return func(*args, **kwargs) [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] raise e [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] nwinfo = self.network_api.allocate_for_instance( [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] created_port_ids = self._update_ports_for_instance( [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] with excutils.save_and_reraise_exception(): [ 548.597511] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.598966] env[61998]: ERROR 
nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] self.force_reraise() [ 548.598966] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.598966] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] raise self.value [ 548.598966] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 548.598966] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] updated_port = self._update_port( [ 548.598966] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.598966] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] _ensure_no_port_binding_failure(port) [ 548.598966] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.598966] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] raise exception.PortBindingFailed(port_id=port['id']) [ 548.598966] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] nova.exception.PortBindingFailed: Binding failed for port 180ea470-060a-4016-b570-1e5bf3515605, please check neutron logs for more information. [ 548.598966] env[61998]: ERROR nova.compute.manager [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] [ 548.599338] env[61998]: DEBUG nova.compute.utils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Binding failed for port 180ea470-060a-4016-b570-1e5bf3515605, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 548.599338] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.210s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.603761] env[61998]: INFO nova.compute.claims [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 548.609377] env[61998]: DEBUG nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Build of instance 5a590ae6-eb88-433d-81b4-33e7b6ace868 was re-scheduled: Binding failed for port 180ea470-060a-4016-b570-1e5bf3515605, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 548.609377] env[61998]: DEBUG nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 548.609654] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Acquiring lock "refresh_cache-5a590ae6-eb88-433d-81b4-33e7b6ace868" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.609757] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Acquired lock "refresh_cache-5a590ae6-eb88-433d-81b4-33e7b6ace868" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.609918] env[61998]: DEBUG nova.network.neutron [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 548.638114] env[61998]: DEBUG nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 548.641241] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Releasing lock "refresh_cache-782f1eba-459c-4249-b74a-128b22c64ca3" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.642032] env[61998]: DEBUG nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 548.642553] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 548.642894] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca0ecab9-8f99-4496-9e65-3d1a94f23149 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.656703] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe99e72-f6fb-4ce4-9912-31716829d0a8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.678868] env[61998]: DEBUG nova.virt.hardware [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 548.679147] env[61998]: DEBUG nova.virt.hardware [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 548.679312] env[61998]: DEBUG nova.virt.hardware [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 548.679487] env[61998]: DEBUG nova.virt.hardware [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 548.679632] env[61998]: DEBUG nova.virt.hardware [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 548.679777] env[61998]: DEBUG nova.virt.hardware [None req-527372e2-5791-4d24-b66b-c50eb05e638f 
tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 548.679990] env[61998]: DEBUG nova.virt.hardware [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 548.680171] env[61998]: DEBUG nova.virt.hardware [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 548.680333] env[61998]: DEBUG nova.virt.hardware [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 548.680487] env[61998]: DEBUG nova.virt.hardware [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 548.680654] env[61998]: DEBUG nova.virt.hardware [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 548.681506] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4234dc44-00b7-411f-a1cb-c4502fae3dd7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.689643] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 782f1eba-459c-4249-b74a-128b22c64ca3 could not be found. 
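Note: the WARNING above shows the destroy path tolerating an instance that never reached the backend. Because spawn failed before a VM was created, the UUID lookup raises InstanceNotFound, the driver logs the warning, and teardown continues so network deallocation and claim cleanup can still run. A hedged sketch of that pattern (names simplified, not the exact vmops code):

    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(instance_uuid, find_vm, destroy_vm):
        # Treat "no VM on the backend" as already destroyed so the
        # rest of the cleanup (networks, claims) still proceeds.
        try:
            destroy_vm(find_vm(instance_uuid))
        except InstanceNotFound:
            LOG.warning("Instance does not exist on backend: %s",
                        instance_uuid)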
[ 548.689842] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 548.690040] env[61998]: INFO nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 548.690262] env[61998]: DEBUG oslo.service.loopingcall [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 548.690900] env[61998]: DEBUG nova.compute.manager [-] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 548.691054] env[61998]: DEBUG nova.network.neutron [-] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 548.695964] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60068520-9f5f-4ceb-8c76-856ef3919046 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.736411] env[61998]: DEBUG nova.network.neutron [-] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 548.750509] env[61998]: DEBUG nova.compute.manager [None req-6dc0f83d-57af-40f0-93da-0963242c4b8c tempest-ServerDiagnosticsV248Test-2030743529 tempest-ServerDiagnosticsV248Test-2030743529-project-admin] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 548.751571] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82248153-afcb-4cfb-a8fc-c7c62a7fdbd9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.758947] env[61998]: INFO nova.compute.manager [None req-6dc0f83d-57af-40f0-93da-0963242c4b8c tempest-ServerDiagnosticsV248Test-2030743529 tempest-ServerDiagnosticsV248Test-2030743529-project-admin] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Retrieving diagnostics [ 548.759818] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27b5109-33b6-416b-82ff-5a3d8451cbbc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.064987] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.139152] env[61998]: DEBUG nova.network.neutron [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 549.240823] env[61998]: DEBUG nova.network.neutron [-] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.385988] env[61998]: DEBUG nova.network.neutron [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.744244] env[61998]: INFO nova.compute.manager [-] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Took 1.05 seconds to deallocate network for instance. 
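Note: the lockutils lines in this log come from oslo.concurrency, which logs when a named lock is requested, how long the caller waited, and how long it was held. The resource tracker serializes instance_claim and abort_instance_claim through the "compute_resources" name, which is why claims here queue up behind failed-build cleanup. A minimal sketch using the real oslo.concurrency API (the function body is a placeholder):

    from oslo_concurrency import lockutils

    # Calls decorated with the same lock name are serialized; lockutils
    # emits the Acquiring/acquired/released lines with waited/held times.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        pass  # release the instance's CPU/RAM/disk claim here

The context-manager form, with lockutils.lock('compute_resources'):, produces the same log lines for ad hoc critical sections.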
[ 549.746619] env[61998]: DEBUG nova.compute.claims [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 549.746820] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.895524] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Releasing lock "refresh_cache-5a590ae6-eb88-433d-81b4-33e7b6ace868" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.895811] env[61998]: DEBUG nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 549.895939] env[61998]: DEBUG nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 549.896131] env[61998]: DEBUG nova.network.neutron [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 549.932115] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c163a83a-eca4-493f-bc87-66ae19de7156 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.937112] env[61998]: DEBUG nova.network.neutron [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 549.942537] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2406a3-133d-4dfa-990a-2718803fc638 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.978452] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0177e6-7f2e-471f-afe4-74eca1c93507 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.986333] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66b7035-08ce-4dcb-949f-410c0003679f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.000567] env[61998]: DEBUG nova.compute.provider_tree [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.027276] env[61998]: DEBUG nova.compute.manager [req-74e46721-12a6-48f1-b240-b54e05f4a1fc req-a0002016-02ac-4ce4-ab0d-3bfabcadb32e service nova] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Received event network-vif-deleted-4dcbe70e-1499-4982-bf91-78ea40b01f17 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 550.027473] env[61998]: DEBUG nova.compute.manager [req-74e46721-12a6-48f1-b240-b54e05f4a1fc req-a0002016-02ac-4ce4-ab0d-3bfabcadb32e service nova] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Received event network-changed-ab247aa6-38cc-4ff4-bdeb-351ba048048e {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 550.027626] env[61998]: DEBUG nova.compute.manager [req-74e46721-12a6-48f1-b240-b54e05f4a1fc req-a0002016-02ac-4ce4-ab0d-3bfabcadb32e service nova] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Refreshing instance network info cache due to event network-changed-ab247aa6-38cc-4ff4-bdeb-351ba048048e. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 550.027830] env[61998]: DEBUG oslo_concurrency.lockutils [req-74e46721-12a6-48f1-b240-b54e05f4a1fc req-a0002016-02ac-4ce4-ab0d-3bfabcadb32e service nova] Acquiring lock "refresh_cache-782f1eba-459c-4249-b74a-128b22c64ca3" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.027963] env[61998]: DEBUG oslo_concurrency.lockutils [req-74e46721-12a6-48f1-b240-b54e05f4a1fc req-a0002016-02ac-4ce4-ab0d-3bfabcadb32e service nova] Acquired lock "refresh_cache-782f1eba-459c-4249-b74a-128b22c64ca3" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.032449] env[61998]: DEBUG nova.network.neutron [req-74e46721-12a6-48f1-b240-b54e05f4a1fc req-a0002016-02ac-4ce4-ab0d-3bfabcadb32e service nova] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Refreshing network info cache for port ab247aa6-38cc-4ff4-bdeb-351ba048048e {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 550.438600] env[61998]: DEBUG nova.network.neutron [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.488251] env[61998]: ERROR nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 583592a8-3db1-4ec9-9a9f-79479d91196d, please check neutron logs for more information. 
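Note: the "after 1 attempt(s)" wording above comes from a retry loop around allocate_for_instance; with no retries configured, the first PortBindingFailed is terminal and is re-raised into the spawn path (its traceback follows below). A sketch of that loop, assuming a network_allocate_retries-style option controls the attempt count; this follows the upstream pattern but is not verified against this tree:

    import logging

    LOG = logging.getLogger(__name__)

    def allocate_with_retries(allocate, retries=0):
        # retries=0 means a single attempt, matching "1 attempt(s)" above.
        attempts = max(0, retries) + 1
        for attempt in range(1, attempts + 1):
            try:
                return allocate()
            except Exception:
                if attempt == attempts:
                    LOG.exception("Instance failed network setup after "
                                  "%d attempt(s)", attempt)
                    raise
                LOG.warning("Network setup attempt %d failed, retrying",
                            attempt)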
[ 550.488251] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 550.488251] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 550.488251] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 550.488251] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 550.488251] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 550.488251] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 550.488251] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 550.488251] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.488251] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 550.488251] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.488251] env[61998]: ERROR nova.compute.manager raise self.value [ 550.488251] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 550.488251] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 550.488251] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.488251] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 550.488747] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.488747] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 550.488747] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 583592a8-3db1-4ec9-9a9f-79479d91196d, please check neutron logs for more information. 
[ 550.488747] env[61998]: ERROR nova.compute.manager [ 550.488747] env[61998]: Traceback (most recent call last): [ 550.488747] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 550.488747] env[61998]: listener.cb(fileno) [ 550.488747] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.488747] env[61998]: result = function(*args, **kwargs) [ 550.488747] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 550.488747] env[61998]: return func(*args, **kwargs) [ 550.488747] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 550.488747] env[61998]: raise e [ 550.488747] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 550.488747] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 550.488747] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 550.488747] env[61998]: created_port_ids = self._update_ports_for_instance( [ 550.488747] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 550.488747] env[61998]: with excutils.save_and_reraise_exception(): [ 550.488747] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.488747] env[61998]: self.force_reraise() [ 550.488747] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.488747] env[61998]: raise self.value [ 550.488747] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 550.488747] env[61998]: updated_port = self._update_port( [ 550.488747] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.488747] env[61998]: _ensure_no_port_binding_failure(port) [ 550.488747] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.488747] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 550.489570] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 583592a8-3db1-4ec9-9a9f-79479d91196d, please check neutron logs for more information. [ 550.489570] env[61998]: Removing descriptor: 15 [ 550.489803] env[61998]: ERROR nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 583592a8-3db1-4ec9-9a9f-79479d91196d, please check neutron logs for more information. 
[ 550.489803] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Traceback (most recent call last): [ 550.489803] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 550.489803] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] yield resources [ 550.489803] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 550.489803] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] self.driver.spawn(context, instance, image_meta, [ 550.489803] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 550.489803] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 550.489803] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 550.489803] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] vm_ref = self.build_virtual_machine(instance, [ 550.489803] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 550.490176] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] vif_infos = vmwarevif.get_vif_info(self._session, [ 550.490176] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 550.490176] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] for vif in network_info: [ 550.490176] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 550.490176] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] return self._sync_wrapper(fn, *args, **kwargs) [ 550.490176] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 550.490176] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] self.wait() [ 550.490176] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 550.490176] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] self[:] = self._gt.wait() [ 550.490176] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 550.490176] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] return self._exit_event.wait() [ 550.490176] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 550.490176] env[61998]: ERROR 
nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] result = hub.switch() [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] return self.greenlet.switch() [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] result = function(*args, **kwargs) [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] return func(*args, **kwargs) [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] raise e [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] nwinfo = self.network_api.allocate_for_instance( [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] created_port_ids = self._update_ports_for_instance( [ 550.490492] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 550.490981] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] with excutils.save_and_reraise_exception(): [ 550.490981] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.490981] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] self.force_reraise() [ 550.490981] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.490981] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] raise self.value [ 550.490981] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 550.490981] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] updated_port = self._update_port( [ 550.490981] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.490981] 
env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] _ensure_no_port_binding_failure(port) [ 550.490981] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.490981] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] raise exception.PortBindingFailed(port_id=port['id']) [ 550.490981] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] nova.exception.PortBindingFailed: Binding failed for port 583592a8-3db1-4ec9-9a9f-79479d91196d, please check neutron logs for more information. [ 550.490981] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] [ 550.491514] env[61998]: INFO nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Terminating instance [ 550.494991] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Acquiring lock "refresh_cache-ac0209e5-66d0-4a04-892d-85eba3c3663a" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.495059] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Acquired lock "refresh_cache-ac0209e5-66d0-4a04-892d-85eba3c3663a" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.495208] env[61998]: DEBUG nova.network.neutron [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 550.503330] env[61998]: DEBUG nova.scheduler.client.report [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 550.559221] env[61998]: DEBUG nova.network.neutron [req-74e46721-12a6-48f1-b240-b54e05f4a1fc req-a0002016-02ac-4ce4-ab0d-3bfabcadb32e service nova] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.742817] env[61998]: DEBUG nova.network.neutron [req-74e46721-12a6-48f1-b240-b54e05f4a1fc req-a0002016-02ac-4ce4-ab0d-3bfabcadb32e service nova] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.943080] env[61998]: INFO nova.compute.manager [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] Took 1.05 seconds to deallocate network for instance. [ 551.011132] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.409s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.011132] env[61998]: DEBUG nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 551.011901] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.575s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.032609] env[61998]: DEBUG nova.network.neutron [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.202585] env[61998]: DEBUG nova.network.neutron [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.247633] env[61998]: DEBUG oslo_concurrency.lockutils [req-74e46721-12a6-48f1-b240-b54e05f4a1fc req-a0002016-02ac-4ce4-ab0d-3bfabcadb32e service nova] Releasing lock "refresh_cache-782f1eba-459c-4249-b74a-128b22c64ca3" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.247937] env[61998]: DEBUG nova.compute.manager [req-74e46721-12a6-48f1-b240-b54e05f4a1fc req-a0002016-02ac-4ce4-ab0d-3bfabcadb32e service nova] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Received event network-vif-deleted-ab247aa6-38cc-4ff4-bdeb-351ba048048e {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 551.261848] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquiring lock "6236c44a-e3c6-4302-8f15-4eb8dfaf5960" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.262202] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Lock "6236c44a-e3c6-4302-8f15-4eb8dfaf5960" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.423282] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Acquiring lock "2aabbd53-4c4d-4b53-8135-34cc5a17fd47" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.423559] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Lock "2aabbd53-4c4d-4b53-8135-34cc5a17fd47" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.423799] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Acquiring lock "2aabbd53-4c4d-4b53-8135-34cc5a17fd47-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.423990] env[61998]: DEBUG 
oslo_concurrency.lockutils [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Lock "2aabbd53-4c4d-4b53-8135-34cc5a17fd47-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.424172] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Lock "2aabbd53-4c4d-4b53-8135-34cc5a17fd47-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.430471] env[61998]: INFO nova.compute.manager [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Terminating instance [ 551.433209] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Acquiring lock "refresh_cache-2aabbd53-4c4d-4b53-8135-34cc5a17fd47" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.433209] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Acquired lock "refresh_cache-2aabbd53-4c4d-4b53-8135-34cc5a17fd47" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.433350] env[61998]: DEBUG nova.network.neutron [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 551.516382] env[61998]: DEBUG nova.compute.utils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 551.522828] env[61998]: DEBUG nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 551.523018] env[61998]: DEBUG nova.network.neutron [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 551.636683] env[61998]: DEBUG nova.policy [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7884d802d68948fe9e3e441a2972beda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '009c58d30ac2420c94539ca063e22012', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 551.705566] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Releasing lock "refresh_cache-ac0209e5-66d0-4a04-892d-85eba3c3663a" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.706242] env[61998]: DEBUG nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 551.706401] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 551.706766] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a0270d62-5298-4e07-9be4-615c6c7aabb2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.719301] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dcfcd0-a439-4c2a-8b81-6c7bf141d4d6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.759996] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ac0209e5-66d0-4a04-892d-85eba3c3663a could not be found. 
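The destroy path above is deliberately tolerant of instances that never materialized on the hypervisor: vmops looks the VM up by UUID (the SearchIndex.FindAllByUuid call), logs a warning when vCenter has no record of it, and still walks the rest of the teardown so network and claim cleanup run. A minimal self-contained sketch of that pattern, assuming a dict-backed stand-in for the vCenter lookup (find_vm_by_uuid and the backend mapping are illustrative, not Nova's actual helpers):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger("vmops-sketch")


    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""


    def find_vm_by_uuid(backend, uuid):
        # Illustrative replacement for the SearchIndex.FindAllByUuid lookup.
        try:
            return backend[uuid]
        except KeyError:
            raise InstanceNotFound(uuid) from None


    def destroy(backend, uuid):
        try:
            vm_ref = find_vm_by_uuid(backend, uuid)
        except InstanceNotFound:
            # Nothing ever spawned (here, the port binding failed first), so
            # there is no VM to power off or unregister; warn and fall through.
            LOG.warning("Instance %s does not exist on backend", uuid)
        else:
            LOG.debug("Powering off and unregistering %s", vm_ref)
        # Teardown continues unconditionally, matching the log's
        # "Instance destroyed" followed by "Deallocating network for instance".
        LOG.debug("Instance destroyed")


    destroy({}, "ac0209e5-66d0-4a04-892d-85eba3c3663a")

That warning branch doing no vCenter work is also why the next INFO entry reports the hypervisor destroy took only 0.05 seconds.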
[ 551.761617] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 551.761617] env[61998]: INFO nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 551.761617] env[61998]: DEBUG oslo.service.loopingcall [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 551.764691] env[61998]: DEBUG nova.compute.manager [-] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 551.764691] env[61998]: DEBUG nova.network.neutron [-] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 551.779937] env[61998]: DEBUG nova.network.neutron [-] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.899849] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250e285a-1247-4f09-b643-b6be853e2373 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.908097] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c38d37-57c2-4987-9d6c-d932b4779378 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.942057] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc22642-77cf-4eed-bd29-ab9c3b079334 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.950441] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36b32b4-536f-47d8-a583-c2a765a3f537 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.965223] env[61998]: DEBUG nova.compute.provider_tree [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.971507] env[61998]: DEBUG nova.network.neutron [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 
2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.981729] env[61998]: INFO nova.scheduler.client.report [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Deleted allocations for instance 5a590ae6-eb88-433d-81b4-33e7b6ace868 [ 552.027786] env[61998]: DEBUG nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 552.074203] env[61998]: DEBUG nova.network.neutron [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.156330] env[61998]: DEBUG nova.network.neutron [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Successfully created port: 3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 552.284404] env[61998]: DEBUG nova.network.neutron [-] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.474894] env[61998]: DEBUG nova.scheduler.client.report [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 552.491804] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f20b4b76-1825-404d-ac4b-64ce6a59ca6d tempest-ServerDiagnosticsNegativeTest-1537790555 tempest-ServerDiagnosticsNegativeTest-1537790555-project-member] Lock "5a590ae6-eb88-433d-81b4-33e7b6ace868" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.182s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.494553] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "5a590ae6-eb88-433d-81b4-33e7b6ace868" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 21.190s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.494553] env[61998]: INFO nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 5a590ae6-eb88-433d-81b4-33e7b6ace868] During sync_power_state the instance has a pending task (spawning). Skip. [ 552.494553] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "5a590ae6-eb88-433d-81b4-33e7b6ace868" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.569829] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Acquiring lock "71248677-92fb-4f66-b089-2cbbdc808bb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.570071] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Lock "71248677-92fb-4f66-b089-2cbbdc808bb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.578304] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Releasing lock "refresh_cache-2aabbd53-4c4d-4b53-8135-34cc5a17fd47" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.579107] env[61998]: DEBUG nova.compute.manager [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 552.579107] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 552.580286] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b305031-0de6-4834-bbee-691df8ef22ef {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.593348] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 552.593597] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-589c1ed1-51cc-430a-a476-5f29a2eef6ff {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.600477] env[61998]: DEBUG oslo_vmware.api [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){ [ 552.600477] env[61998]: value = "task-1388365" [ 552.600477] env[61998]: _type = "Task" [ 552.600477] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.610459] env[61998]: DEBUG oslo_vmware.api [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388365, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.788180] env[61998]: INFO nova.compute.manager [-] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Took 1.02 seconds to deallocate network for instance. 
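The PowerOffVM_Task exchange above (completed a few entries later) follows the usual oslo.vmware call shape: invoke the task-returning vCenter method, then hand the task reference to wait_for_task, which polls it until it reaches a terminal state; the "progress is 0%" lines come from that _poll_task loop. A rough sketch of the poll loop against a fake task object, since a live VMwareAPISession is out of scope here (FakeTask and the state strings are assumptions for illustration):

    import itertools
    import time


    class FakeTask:
        """Toy stand-in for a vCenter task reference; succeeds on the third poll."""

        def __init__(self, task_id):
            self.id = task_id
            self._states = itertools.chain(["running", "running"],
                                           itertools.repeat("success"))

        def poll(self):
            return next(self._states)


    def wait_for_task(task, interval=0.5):
        # Mirrors the wait_for_task/_poll_task pair in the log: poll on a
        # fixed interval and return (or raise) once the task is terminal.
        while True:
            state = task.poll()
            print(f"Task {task.id}: {state}")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(f"Task {task.id} failed")
            time.sleep(interval)


    wait_for_task(FakeTask("task-1388365"))

The real loop is driven from an oslo.service looping call rather than a bare while/sleep, but the control flow is the same.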
[ 552.790547] env[61998]: DEBUG nova.compute.claims [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 552.790714] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.980715] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.969s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.981384] env[61998]: ERROR nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 237ef56f-98dd-4c68-8be3-7f0387c010ff, please check neutron logs for more information. [ 552.981384] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Traceback (most recent call last): [ 552.981384] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 552.981384] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] self.driver.spawn(context, instance, image_meta, [ 552.981384] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 552.981384] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 552.981384] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 552.981384] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] vm_ref = self.build_virtual_machine(instance, [ 552.981384] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 552.981384] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] vif_infos = vmwarevif.get_vif_info(self._session, [ 552.981384] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] for vif in network_info: [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] 
File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] return self._sync_wrapper(fn, *args, **kwargs) [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] self.wait() [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] self[:] = self._gt.wait() [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] return self._exit_event.wait() [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] result = hub.switch() [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 552.981761] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] return self.greenlet.switch() [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] result = function(*args, **kwargs) [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] return func(*args, **kwargs) [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] raise e [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] nwinfo = self.network_api.allocate_for_instance( [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] created_port_ids = self._update_ports_for_instance( [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/network/neutron.py", 
line 1414, in _update_ports_for_instance [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] with excutils.save_and_reraise_exception(): [ 552.982224] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.982610] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] self.force_reraise() [ 552.982610] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.982610] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] raise self.value [ 552.982610] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.982610] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] updated_port = self._update_port( [ 552.982610] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.982610] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] _ensure_no_port_binding_failure(port) [ 552.982610] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.982610] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] raise exception.PortBindingFailed(port_id=port['id']) [ 552.982610] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] nova.exception.PortBindingFailed: Binding failed for port 237ef56f-98dd-4c68-8be3-7f0387c010ff, please check neutron logs for more information. [ 552.982610] env[61998]: ERROR nova.compute.manager [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] [ 552.982942] env[61998]: DEBUG nova.compute.utils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Binding failed for port 237ef56f-98dd-4c68-8be3-7f0387c010ff, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 552.983849] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.063s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.986024] env[61998]: DEBUG nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Build of instance 4cafaebd-b577-4a7e-92b8-151445f66a0d was re-scheduled: Binding failed for port 237ef56f-98dd-4c68-8be3-7f0387c010ff, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 552.986024] env[61998]: DEBUG nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 552.986201] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquiring lock "refresh_cache-4cafaebd-b577-4a7e-92b8-151445f66a0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.986341] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquired lock "refresh_cache-4cafaebd-b577-4a7e-92b8-151445f66a0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.986499] env[61998]: DEBUG nova.network.neutron [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 552.995156] env[61998]: DEBUG nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 553.041174] env[61998]: DEBUG nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 553.073165] env[61998]: DEBUG nova.virt.hardware [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:49:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='772622332',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-596299438',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 553.073165] env[61998]: DEBUG nova.virt.hardware [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 553.073165] env[61998]: DEBUG nova.virt.hardware [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 553.073332] env[61998]: DEBUG nova.virt.hardware [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 553.073332] env[61998]: DEBUG nova.virt.hardware [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 553.073464] env[61998]: DEBUG nova.virt.hardware [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 553.073666] env[61998]: DEBUG nova.virt.hardware [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 553.073818] env[61998]: DEBUG nova.virt.hardware [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 
tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 553.073985] env[61998]: DEBUG nova.virt.hardware [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 553.078728] env[61998]: DEBUG nova.virt.hardware [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 553.078728] env[61998]: DEBUG nova.virt.hardware [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 553.079843] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a270de-6aee-4b93-928d-b678500e4380 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.088807] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edba86b-3362-47ca-8861-2f825d76143f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.114699] env[61998]: DEBUG oslo_vmware.api [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388365, 'name': PowerOffVM_Task, 'duration_secs': 0.117569} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.114699] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 553.114841] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 553.115125] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb043217-eb77-4fcd-b0d9-d4037a196b11 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.138976] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 553.139540] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 553.139540] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Deleting the datastore file [datastore2] 2aabbd53-4c4d-4b53-8135-34cc5a17fd47 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 553.139669] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-edac361e-947b-4b32-8ad0-4812929b0135 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.147129] env[61998]: DEBUG oslo_vmware.api [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for the task: (returnval){ [ 553.147129] env[61998]: value = "task-1388367" [ 553.147129] env[61998]: _type = "Task" [ 553.147129] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.155608] env[61998]: DEBUG oslo_vmware.api [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388367, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.202287] env[61998]: DEBUG nova.compute.manager [req-437e9ab3-e64f-40b6-9539-6d4dc3edc454 req-2aa57de8-468e-464d-9e68-f6d19821a8fc service nova] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Received event network-changed-583592a8-3db1-4ec9-9a9f-79479d91196d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 553.202534] env[61998]: DEBUG nova.compute.manager [req-437e9ab3-e64f-40b6-9539-6d4dc3edc454 req-2aa57de8-468e-464d-9e68-f6d19821a8fc service nova] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Refreshing instance network info cache due to event network-changed-583592a8-3db1-4ec9-9a9f-79479d91196d. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 553.202754] env[61998]: DEBUG oslo_concurrency.lockutils [req-437e9ab3-e64f-40b6-9539-6d4dc3edc454 req-2aa57de8-468e-464d-9e68-f6d19821a8fc service nova] Acquiring lock "refresh_cache-ac0209e5-66d0-4a04-892d-85eba3c3663a" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.202894] env[61998]: DEBUG oslo_concurrency.lockutils [req-437e9ab3-e64f-40b6-9539-6d4dc3edc454 req-2aa57de8-468e-464d-9e68-f6d19821a8fc service nova] Acquired lock "refresh_cache-ac0209e5-66d0-4a04-892d-85eba3c3663a" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.203222] env[61998]: DEBUG nova.network.neutron [req-437e9ab3-e64f-40b6-9539-6d4dc3edc454 req-2aa57de8-468e-464d-9e68-f6d19821a8fc service nova] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Refreshing network info cache for port 583592a8-3db1-4ec9-9a9f-79479d91196d {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 553.482706] env[61998]: ERROR nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3, please check neutron logs for more information. 
[ 553.482706] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 553.482706] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 553.482706] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 553.482706] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 553.482706] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 553.482706] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 553.482706] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 553.482706] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.482706] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 553.482706] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.482706] env[61998]: ERROR nova.compute.manager raise self.value [ 553.482706] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 553.482706] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 553.482706] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.482706] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 553.483242] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.483242] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 553.483242] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3, please check neutron logs for more information. 
[ 553.483242] env[61998]: ERROR nova.compute.manager [ 553.483242] env[61998]: Traceback (most recent call last): [ 553.483242] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 553.483242] env[61998]: listener.cb(fileno) [ 553.483242] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.483242] env[61998]: result = function(*args, **kwargs) [ 553.483242] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 553.483242] env[61998]: return func(*args, **kwargs) [ 553.483242] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 553.483242] env[61998]: raise e [ 553.483242] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 553.483242] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 553.483242] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 553.483242] env[61998]: created_port_ids = self._update_ports_for_instance( [ 553.483242] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 553.483242] env[61998]: with excutils.save_and_reraise_exception(): [ 553.483242] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.483242] env[61998]: self.force_reraise() [ 553.483242] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.483242] env[61998]: raise self.value [ 553.483242] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 553.483242] env[61998]: updated_port = self._update_port( [ 553.483242] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.483242] env[61998]: _ensure_no_port_binding_failure(port) [ 553.483242] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.483242] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 553.484019] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3, please check neutron logs for more information. [ 553.484019] env[61998]: Removing descriptor: 15 [ 553.484019] env[61998]: ERROR nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3, please check neutron logs for more information. 
[ 553.484019] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Traceback (most recent call last): [ 553.484019] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 553.484019] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] yield resources [ 553.484019] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 553.484019] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] self.driver.spawn(context, instance, image_meta, [ 553.484019] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 553.484019] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 553.484019] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 553.484019] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] vm_ref = self.build_virtual_machine(instance, [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] vif_infos = vmwarevif.get_vif_info(self._session, [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] for vif in network_info: [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] return self._sync_wrapper(fn, *args, **kwargs) [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] self.wait() [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] self[:] = self._gt.wait() [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] return self._exit_event.wait() [ 553.484326] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 553.484659] env[61998]: ERROR 
nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] result = hub.switch() [ 553.484659] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 553.484659] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] return self.greenlet.switch() [ 553.484659] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.484659] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] result = function(*args, **kwargs) [ 553.484659] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 553.484659] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] return func(*args, **kwargs) [ 553.484659] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 553.484659] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] raise e [ 553.484659] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 553.484659] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] nwinfo = self.network_api.allocate_for_instance( [ 553.484659] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 553.484659] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] created_port_ids = self._update_ports_for_instance( [ 553.485015] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 553.485015] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] with excutils.save_and_reraise_exception(): [ 553.485015] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.485015] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] self.force_reraise() [ 553.485015] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.485015] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] raise self.value [ 553.485015] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 553.485015] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] updated_port = self._update_port( [ 553.485015] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.485015] 
env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] _ensure_no_port_binding_failure(port) [ 553.485015] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.485015] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] raise exception.PortBindingFailed(port_id=port['id']) [ 553.485402] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] nova.exception.PortBindingFailed: Binding failed for port 3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3, please check neutron logs for more information. [ 553.485402] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] [ 553.485402] env[61998]: INFO nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Terminating instance [ 553.488266] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Acquiring lock "refresh_cache-0fd8d146-79fe-4e2a-90a3-67d457fc570f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.488266] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Acquired lock "refresh_cache-0fd8d146-79fe-4e2a-90a3-67d457fc570f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.488266] env[61998]: DEBUG nova.network.neutron [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 553.516580] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.517469] env[61998]: DEBUG nova.network.neutron [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.571297] env[61998]: DEBUG nova.network.neutron [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.637747] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Acquiring lock "fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.638067] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Lock "fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.659225] env[61998]: DEBUG oslo_vmware.api [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Task: {'id': task-1388367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092459} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.659225] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 553.659225] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 553.659225] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 553.659225] env[61998]: INFO nova.compute.manager [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Took 1.08 seconds to destroy the instance on the hypervisor. 
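The recurring 'Acquiring lock ...' / 'Lock ... acquired ... waited N s' / 'Lock ... "released" ... held N s' triplets throughout these entries come from oslo.concurrency's lockutils wrapper, which serializes callers on a named lock and logs the wait and hold durations. A minimal, hypothetical usage sketch (the lock name 'compute_resources' matches the log; the decorated function body is invented):

    # Sketch of the lockutils pattern behind the waited/held timings in this
    # log; Nova wraps its resource-tracker methods in the same way.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Runs only while the named lock is held. Entering emits the
        # "Acquiring lock" / "acquired ... waited" DEBUG lines; returning
        # emits the '"released" ... held' line, which is where timings such
        # as 'waited 25.358s' and 'held 9.110s' in this section come from.
        pass

The long waits visible here (tens of seconds on "compute_resources") are callers queuing behind the resource tracker's periodic _update_available_resource pass rather than lock leaks.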
[ 553.659425] env[61998]: DEBUG oslo.service.loopingcall [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.659457] env[61998]: DEBUG nova.compute.manager [-] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 553.659644] env[61998]: DEBUG nova.network.neutron [-] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 553.685901] env[61998]: DEBUG nova.network.neutron [-] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.722171] env[61998]: DEBUG nova.network.neutron [req-437e9ab3-e64f-40b6-9539-6d4dc3edc454 req-2aa57de8-468e-464d-9e68-f6d19821a8fc service nova] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.845034] env[61998]: DEBUG nova.network.neutron [req-437e9ab3-e64f-40b6-9539-6d4dc3edc454 req-2aa57de8-468e-464d-9e68-f6d19821a8fc service nova] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.019191] env[61998]: DEBUG nova.network.neutron [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 554.028122] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 2aabbd53-4c4d-4b53-8135-34cc5a17fd47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.075193] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Releasing lock "refresh_cache-4cafaebd-b577-4a7e-92b8-151445f66a0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.075193] env[61998]: DEBUG nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 554.075193] env[61998]: DEBUG nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 554.075193] env[61998]: DEBUG nova.network.neutron [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 554.095721] env[61998]: DEBUG nova.network.neutron [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 554.190675] env[61998]: DEBUG nova.network.neutron [-] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.239255] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Acquiring lock "b4706725-5e28-4d2a-b4a8-7633ffa63afe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.240567] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Lock "b4706725-5e28-4d2a-b4a8-7633ffa63afe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.316828] env[61998]: DEBUG nova.network.neutron [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.348500] env[61998]: DEBUG oslo_concurrency.lockutils [req-437e9ab3-e64f-40b6-9539-6d4dc3edc454 req-2aa57de8-468e-464d-9e68-f6d19821a8fc service nova] Releasing lock "refresh_cache-ac0209e5-66d0-4a04-892d-85eba3c3663a" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.349203] env[61998]: DEBUG nova.compute.manager [req-437e9ab3-e64f-40b6-9539-6d4dc3edc454 req-2aa57de8-468e-464d-9e68-f6d19821a8fc service nova] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Received event network-vif-deleted-583592a8-3db1-4ec9-9a9f-79479d91196d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 554.531383] env[61998]: DEBUG nova.compute.resource_tracker [None 
req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 4cafaebd-b577-4a7e-92b8-151445f66a0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 554.531486] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 81bbe6ae-87b2-414b-a872-4e03285abf92 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.531605] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 687eaa4d-012a-4dd9-9033-16fad3650a56 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.531728] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.531839] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.531965] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 782f1eba-459c-4249-b74a-128b22c64ca3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.532086] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance ac0209e5-66d0-4a04-892d-85eba3c3663a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.532211] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 0fd8d146-79fe-4e2a-90a3-67d457fc570f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.600301] env[61998]: DEBUG nova.network.neutron [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.638062] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquiring lock "70af5d32-254f-4819-8cca-c28346e48139" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.638302] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Lock "70af5d32-254f-4819-8cca-c28346e48139" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.698212] env[61998]: INFO nova.compute.manager [-] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Took 1.04 seconds to deallocate network for instance. [ 554.820591] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Releasing lock "refresh_cache-0fd8d146-79fe-4e2a-90a3-67d457fc570f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.821023] env[61998]: DEBUG nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 554.821252] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 554.821557] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-525d7c9f-f2e1-40d4-9261-3043f41ed47a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.835170] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef595ef-2d39-46ef-8dbe-8df2a7e3f4a4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.858668] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0fd8d146-79fe-4e2a-90a3-67d457fc570f could not be found. [ 554.858787] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 554.859482] env[61998]: INFO nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 554.859482] env[61998]: DEBUG oslo.service.loopingcall [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 554.859603] env[61998]: DEBUG nova.compute.manager [-] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 554.859716] env[61998]: DEBUG nova.network.neutron [-] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 554.897900] env[61998]: DEBUG nova.network.neutron [-] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 555.046856] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance e4ada227-b79a-457a-b063-dde99840aa14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.103056] env[61998]: INFO nova.compute.manager [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] Took 1.03 seconds to deallocate network for instance. [ 555.205627] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.403760] env[61998]: DEBUG nova.network.neutron [-] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.446121] env[61998]: DEBUG nova.compute.manager [req-9236ee96-b467-41cb-9fa1-09a862f31089 req-ff3f39bc-18f4-4a50-a62b-19c5cce63bdd service nova] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Received event network-changed-3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 555.446121] env[61998]: DEBUG nova.compute.manager [req-9236ee96-b467-41cb-9fa1-09a862f31089 req-ff3f39bc-18f4-4a50-a62b-19c5cce63bdd service nova] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Refreshing instance network info cache due to event network-changed-3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 555.447242] env[61998]: DEBUG oslo_concurrency.lockutils [req-9236ee96-b467-41cb-9fa1-09a862f31089 req-ff3f39bc-18f4-4a50-a62b-19c5cce63bdd service nova] Acquiring lock "refresh_cache-0fd8d146-79fe-4e2a-90a3-67d457fc570f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.447886] env[61998]: DEBUG oslo_concurrency.lockutils [req-9236ee96-b467-41cb-9fa1-09a862f31089 req-ff3f39bc-18f4-4a50-a62b-19c5cce63bdd service nova] Acquired lock "refresh_cache-0fd8d146-79fe-4e2a-90a3-67d457fc570f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.448204] env[61998]: DEBUG nova.network.neutron [req-9236ee96-b467-41cb-9fa1-09a862f31089 req-ff3f39bc-18f4-4a50-a62b-19c5cce63bdd service nova] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Refreshing network info cache for port 3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 555.546832] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance ef129347-9ea0-4615-b897-f51e664da1a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.908280] env[61998]: INFO nova.compute.manager [-] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Took 1.05 seconds to deallocate network for instance. [ 555.910735] env[61998]: DEBUG nova.compute.claims [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 555.910905] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.973014] env[61998]: DEBUG nova.network.neutron [req-9236ee96-b467-41cb-9fa1-09a862f31089 req-ff3f39bc-18f4-4a50-a62b-19c5cce63bdd service nova] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 556.051332] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 59330fd4-c362-4593-824d-d40c00f3f5d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.063848] env[61998]: DEBUG nova.network.neutron [req-9236ee96-b467-41cb-9fa1-09a862f31089 req-ff3f39bc-18f4-4a50-a62b-19c5cce63bdd service nova] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.149923] env[61998]: INFO nova.scheduler.client.report [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Deleted allocations for instance 4cafaebd-b577-4a7e-92b8-151445f66a0d [ 556.556672] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance df154c2a-3616-442d-abb0-83e68cf1141d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.570378] env[61998]: DEBUG oslo_concurrency.lockutils [req-9236ee96-b467-41cb-9fa1-09a862f31089 req-ff3f39bc-18f4-4a50-a62b-19c5cce63bdd service nova] Releasing lock "refresh_cache-0fd8d146-79fe-4e2a-90a3-67d457fc570f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.570378] env[61998]: DEBUG nova.compute.manager [req-9236ee96-b467-41cb-9fa1-09a862f31089 req-ff3f39bc-18f4-4a50-a62b-19c5cce63bdd service nova] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Received event network-vif-deleted-3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 556.659808] env[61998]: DEBUG oslo_concurrency.lockutils [None req-243821ae-4d96-437c-89f9-e82ed55f78b7 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Lock "4cafaebd-b577-4a7e-92b8-151445f66a0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.064s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.662638] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "4cafaebd-b577-4a7e-92b8-151445f66a0d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 25.358s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.668989] env[61998]: INFO nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 4cafaebd-b577-4a7e-92b8-151445f66a0d] During sync_power_state the instance has a pending task (spawning). Skip. [ 556.668989] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "4cafaebd-b577-4a7e-92b8-151445f66a0d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.004s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.061885] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance a8f6254f-b867-4967-b4fa-bb70f471f89d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.168107] env[61998]: DEBUG nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 557.565420] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance ad2f23df-c067-4d30-b143-e50ebcc50d4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.704091] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.070815] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance e37ac276-8a3e-45b3-8176-d972eb4e6e3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 558.574098] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 975b0c65-6f57-4c7c-ae46-b23920a039f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 559.034159] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquiring lock "8672c282-1a66-49b5-9c22-7136b567a52c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.034159] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Lock "8672c282-1a66-49b5-9c22-7136b567a52c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.079569] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 6236c44a-e3c6-4302-8f15-4eb8dfaf5960 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 559.585717] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 71248677-92fb-4f66-b089-2cbbdc808bb7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 560.092387] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 560.598918] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance b4706725-5e28-4d2a-b4a8-7633ffa63afe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 560.598918] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 560.598918] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 561.018968] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcffae0e-126a-4490-a2e8-42be8b5f6fb8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.026993] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4666ab2-c541-4eaf-9b2b-154afbec6113 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.061712] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6bd2c3-3263-4ff2-b26a-157d34c50abb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.069373] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c436a957-f0be-4ebc-896d-65c0aeb98205 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.082855] env[61998]: DEBUG nova.compute.provider_tree [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.434492] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquiring lock "1a91b0c5-d852-424a-b576-3d2c76860b06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.434492] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Lock "1a91b0c5-d852-424a-b576-3d2c76860b06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.586578] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 562.093736] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61998) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 562.093736] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.110s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.093947] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.053s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.097023] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 562.097352] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Getting list of instances from cluster (obj){ [ 562.097352] env[61998]: value = "domain-c8" [ 562.097352] env[61998]: _type = "ClusterComputeResource" [ 562.097352] env[61998]: } {{(pid=61998) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 562.098761] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310bdf5b-721c-4aed-b118-0871e53f159c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.108964] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Got total of 0 instances {{(pid=61998) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 562.234099] env[61998]: DEBUG 
oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquiring lock "0d680f38-bd47-4aeb-8845-efa20667623b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.234345] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Lock "0d680f38-bd47-4aeb-8845-efa20667623b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.048753] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1590e99-c5dc-41ee-a1f6-e1a6a2634f19 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.057603] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0d60aa-b523-4a8f-98fb-4562df8478e2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.091479] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914e75c0-f3de-48c3-b683-7b01f7dd5e42 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.099102] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d33bac5-1236-4009-8e4a-dda1b877527c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.113448] env[61998]: DEBUG nova.compute.provider_tree [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.619085] env[61998]: DEBUG nova.scheduler.client.report [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 564.127767] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.032s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.127767] env[61998]: ERROR nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6c9256dc-277c-45e9-aac5-1754cf36080b, please check neutron logs for more information. [ 564.127767] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Traceback (most recent call last): [ 564.127767] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 564.127767] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] self.driver.spawn(context, instance, image_meta, [ 564.127767] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 564.127767] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] self._vmops.spawn(context, instance, image_meta, injected_files, [ 564.127767] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 564.127767] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] vm_ref = self.build_virtual_machine(instance, [ 564.128901] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 564.128901] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] vif_infos = vmwarevif.get_vif_info(self._session, [ 564.128901] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 564.128901] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] for vif in network_info: [ 564.128901] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 564.128901] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] return self._sync_wrapper(fn, *args, **kwargs) [ 564.128901] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 564.128901] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] self.wait() [ 564.128901] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 564.128901] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] self[:] = self._gt.wait() [ 564.128901] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 564.128901] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] return self._exit_event.wait() [ 564.128901] 
env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] result = hub.switch()
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] return self.greenlet.switch()
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] result = function(*args, **kwargs)
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] return func(*args, **kwargs)
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] raise e
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] nwinfo = self.network_api.allocate_for_instance(
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 564.129381] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] created_port_ids = self._update_ports_for_instance(
[ 564.129810] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 564.129810] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] with excutils.save_and_reraise_exception():
[ 564.129810] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 564.129810] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] self.force_reraise()
[ 564.129810] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 564.129810] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] raise self.value
[ 564.129810] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 564.129810] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] updated_port = self._update_port(
[ 564.129810] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 564.129810] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] _ensure_no_port_binding_failure(port)
[ 564.129810] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 564.129810] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] raise exception.PortBindingFailed(port_id=port['id'])
[ 564.130173] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] nova.exception.PortBindingFailed: Binding failed for port 6c9256dc-277c-45e9-aac5-1754cf36080b, please check neutron logs for more information.
[ 564.130173] env[61998]: ERROR nova.compute.manager [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92]
[ 564.130173] env[61998]: DEBUG nova.compute.utils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Binding failed for port 6c9256dc-277c-45e9-aac5-1754cf36080b, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 564.130173] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.523s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 564.133505] env[61998]: DEBUG nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Build of instance 81bbe6ae-87b2-414b-a872-4e03285abf92 was re-scheduled: Binding failed for port 6c9256dc-277c-45e9-aac5-1754cf36080b, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}}
[ 564.133505] env[61998]: DEBUG nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}}
[ 564.133615] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquiring lock "refresh_cache-81bbe6ae-87b2-414b-a872-4e03285abf92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 564.134592] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquired lock "refresh_cache-81bbe6ae-87b2-414b-a872-4e03285abf92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 564.134592] env[61998]: DEBUG nova.network.neutron [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 564.662356] env[61998]: DEBUG nova.network.neutron [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 564.693643] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "e730a03f-64c0-4e94-bc66-d3006be8b3ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 564.696879] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "e730a03f-64c0-4e94-bc66-d3006be8b3ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 564.724047] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "dff2b45c-bf45-4b22-b78f-287019b483f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 564.724047] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "dff2b45c-bf45-4b22-b78f-287019b483f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 564.771735] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "6643cc70-7e92-41e9-b2dc-c531a331086f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 564.773025] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "6643cc70-7e92-41e9-b2dc-c531a331086f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 564.904779] env[61998]: DEBUG nova.network.neutron [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 565.222492] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ba2412-a9af-4258-b5b7-fe0683db9389 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 565.231534] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfc4a7c-8c20-4c3f-a8ff-cda36c523ace {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 565.273021] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6768e18a-1756-44d6-ab92-e256890cba88 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 565.282717] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c28775-098a-4821-b7d6-ef778e132e25 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 565.296673] env[61998]: DEBUG nova.compute.provider_tree [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 565.413548] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Releasing lock "refresh_cache-81bbe6ae-87b2-414b-a872-4e03285abf92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 565.413836] env[61998]: DEBUG nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}}
[ 565.414060] env[61998]: DEBUG nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 565.414495] env[61998]: DEBUG nova.network.neutron [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 565.450081] env[61998]: DEBUG nova.network.neutron [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 565.800254] env[61998]: DEBUG nova.scheduler.client.report [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 565.955162] env[61998]: DEBUG nova.network.neutron [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 566.307724] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.176s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 566.307724] env[61998]: ERROR nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c8e229b1-2459-48bd-81ff-487cca8a8b31, please check neutron logs for more information.
[ 566.307724] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Traceback (most recent call last):
[ 566.307724] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 566.307724] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] self.driver.spawn(context, instance, image_meta,
[ 566.307724] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 566.307724] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 566.307724] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 566.307724] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] vm_ref = self.build_virtual_machine(instance,
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] vif_infos = vmwarevif.get_vif_info(self._session,
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] for vif in network_info:
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] return self._sync_wrapper(fn, *args, **kwargs)
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] self.wait()
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] self[:] = self._gt.wait()
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] return self._exit_event.wait()
[ 566.308306] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] result = hub.switch()
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] return self.greenlet.switch()
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] result = function(*args, **kwargs)
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] return func(*args, **kwargs)
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] raise e
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] nwinfo = self.network_api.allocate_for_instance(
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 566.309185] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] created_port_ids = self._update_ports_for_instance(
[ 566.309557] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 566.309557] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] with excutils.save_and_reraise_exception():
[ 566.309557] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 566.309557] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] self.force_reraise()
[ 566.309557] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 566.309557] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] raise self.value
[ 566.309557] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 566.309557] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] updated_port = self._update_port(
[ 566.309557] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 566.309557] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] _ensure_no_port_binding_failure(port)
[ 566.309557] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 566.309557] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] raise exception.PortBindingFailed(port_id=port['id'])
[ 566.311508] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] nova.exception.PortBindingFailed: Binding failed for port c8e229b1-2459-48bd-81ff-487cca8a8b31, please check neutron logs for more information.
[ 566.311508] env[61998]: ERROR nova.compute.manager [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56]
[ 566.311508] env[61998]: DEBUG nova.compute.utils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Binding failed for port c8e229b1-2459-48bd-81ff-487cca8a8b31, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 566.311508] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.622s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 566.317152] env[61998]: DEBUG nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Build of instance 687eaa4d-012a-4dd9-9033-16fad3650a56 was re-scheduled: Binding failed for port c8e229b1-2459-48bd-81ff-487cca8a8b31, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}}
[ 566.317216] env[61998]: DEBUG nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}}
[ 566.317452] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Acquiring lock "refresh_cache-687eaa4d-012a-4dd9-9033-16fad3650a56" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 566.317602] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Acquired lock "refresh_cache-687eaa4d-012a-4dd9-9033-16fad3650a56" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 566.321218] env[61998]: DEBUG nova.network.neutron [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 566.459129] env[61998]: INFO nova.compute.manager [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] Took 1.04 seconds to deallocate network for instance.
[ 566.846265] env[61998]: DEBUG nova.network.neutron [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 566.942031] env[61998]: DEBUG nova.network.neutron [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 567.258623] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5baaac9-0547-42e6-981d-7097dba02612 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 567.266379] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2542721-7381-429a-97b7-e9e716cedb21 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 567.308246] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7424e2b9-10d3-4e2c-8a37-1996001d37f7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 567.320025] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0f23e5-c4a8-473a-b6b1-da28e35868ae {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 567.332860] env[61998]: DEBUG nova.compute.provider_tree [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 567.446982] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Releasing lock "refresh_cache-687eaa4d-012a-4dd9-9033-16fad3650a56" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 567.446982] env[61998]: DEBUG nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}}
[ 567.446982] env[61998]: DEBUG nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 567.447431] env[61998]: DEBUG nova.network.neutron [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 567.470276] env[61998]: DEBUG nova.network.neutron [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 567.510102] env[61998]: INFO nova.scheduler.client.report [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Deleted allocations for instance 81bbe6ae-87b2-414b-a872-4e03285abf92
[ 567.837125] env[61998]: DEBUG nova.scheduler.client.report [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 567.976636] env[61998]: DEBUG nova.network.neutron [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 568.019606] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e55669d2-acfe-4fe6-a1e3-3ecfd855ccd8 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Lock "81bbe6ae-87b2-414b-a872-4e03285abf92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.137s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 568.021384] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "81bbe6ae-87b2-414b-a872-4e03285abf92" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 36.716s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 568.021384] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39d98293-1be6-40ec-bede-e683a492d115 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 568.035456] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2798e61-8cb5-4b15-b6c1-3813e0b2ba9a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 568.305711] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquiring lock "c6837b87-b01c-454c-b986-6f9fa57656bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 568.305935] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Lock "c6837b87-b01c-454c-b986-6f9fa57656bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 568.345870] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.036s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 568.346528] env[61998]: ERROR nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b2f2be33-0c12-458e-a815-2fa188123fbb, please check neutron logs for more information.
[ 568.346528] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Traceback (most recent call last):
[ 568.346528] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 568.346528] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] self.driver.spawn(context, instance, image_meta,
[ 568.346528] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 568.346528] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 568.346528] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 568.346528] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] vm_ref = self.build_virtual_machine(instance,
[ 568.346528] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 568.346528] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] vif_infos = vmwarevif.get_vif_info(self._session,
[ 568.346528] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] for vif in network_info:
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] return self._sync_wrapper(fn, *args, **kwargs)
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] self.wait()
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] self[:] = self._gt.wait()
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] return self._exit_event.wait()
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] result = hub.switch()
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 568.347087] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] return self.greenlet.switch()
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] result = function(*args, **kwargs)
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] return func(*args, **kwargs)
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] raise e
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] nwinfo = self.network_api.allocate_for_instance(
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] created_port_ids = self._update_ports_for_instance(
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] with excutils.save_and_reraise_exception():
[ 568.347500] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 568.347907] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] self.force_reraise()
[ 568.347907] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 568.347907] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] raise self.value
[ 568.347907] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 568.347907] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] updated_port = self._update_port(
[ 568.347907] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 568.347907] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] _ensure_no_port_binding_failure(port)
[ 568.347907] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 568.347907] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] raise exception.PortBindingFailed(port_id=port['id'])
[ 568.347907] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] nova.exception.PortBindingFailed: Binding failed for port b2f2be33-0c12-458e-a815-2fa188123fbb, please check neutron logs for more information.
[ 568.347907] env[61998]: ERROR nova.compute.manager [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f]
[ 568.348261] env[61998]: DEBUG nova.compute.utils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Binding failed for port b2f2be33-0c12-458e-a815-2fa188123fbb, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 568.349273] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.607s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 568.351557] env[61998]: DEBUG nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Build of instance d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f was re-scheduled: Binding failed for port b2f2be33-0c12-458e-a815-2fa188123fbb, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}}
[ 568.351971] env[61998]: DEBUG nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}}
[ 568.352200] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Acquiring lock "refresh_cache-d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 568.352337] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Acquired lock "refresh_cache-d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 568.352486] env[61998]: DEBUG nova.network.neutron [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 568.479485] env[61998]: INFO nova.compute.manager [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] [instance: 687eaa4d-012a-4dd9-9033-16fad3650a56] Took 1.03 seconds to deallocate network for instance.
[ 568.522811] env[61998]: DEBUG nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}}
[ 568.568163] env[61998]: INFO nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 81bbe6ae-87b2-414b-a872-4e03285abf92] During the sync_power process the instance has moved from host None to host cpu-1
[ 568.568338] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "81bbe6ae-87b2-414b-a872-4e03285abf92" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.548s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 568.904741] env[61998]: DEBUG nova.network.neutron [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 569.050150] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 569.138544] env[61998]: DEBUG nova.network.neutron [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 569.306160] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquiring lock "41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 569.306456] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Lock "41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 569.363360] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df59481-ce05-4473-93db-0154cef611b6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.370822] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a947dfd2-6ea7-4c34-a498-5183db6c1f7d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.402924] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b40f3e-3e88-4404-8b58-f918892bb5b6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.411251] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb459a17-25fc-48a9-9e5a-5a06b2db5c33 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.425952] env[61998]: DEBUG nova.compute.provider_tree [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 569.523022] env[61998]: INFO nova.scheduler.client.report [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Deleted allocations for instance 687eaa4d-012a-4dd9-9033-16fad3650a56
[ 569.644189] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Releasing lock "refresh_cache-d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 569.644189] env[61998]: DEBUG nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}}
[ 569.644468] env[61998]: DEBUG nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 569.644570] env[61998]: DEBUG nova.network.neutron [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 569.672710] env[61998]: DEBUG nova.network.neutron [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 569.931098] env[61998]: DEBUG nova.scheduler.client.report [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 570.036566] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d6c2fb88-aed4-43e9-a470-de6460589e9e tempest-ServersAdminNegativeTestJSON-740884206 tempest-ServersAdminNegativeTestJSON-740884206-project-member] Lock "687eaa4d-012a-4dd9-9033-16fad3650a56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.482s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 570.177940] env[61998]: DEBUG nova.network.neutron [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 570.286613] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "297c345a-a825-47b1-a9e4-a353758d32ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 570.286952] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "297c345a-a825-47b1-a9e4-a353758d32ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 570.435946] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.087s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 570.436625] env[61998]: ERROR nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4dcbe70e-1499-4982-bf91-78ea40b01f17, please check neutron logs for more information.
[ 570.436625] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Traceback (most recent call last):
[ 570.436625] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 570.436625] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] self.driver.spawn(context, instance, image_meta,
[ 570.436625] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 570.436625] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 570.436625] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 570.436625] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] vm_ref = self.build_virtual_machine(instance,
[ 570.436625] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 570.436625] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] vif_infos = vmwarevif.get_vif_info(self._session,
[ 570.436625] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] for vif in network_info:
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] return self._sync_wrapper(fn, *args, **kwargs)
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] self.wait()
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] self[:] = self._gt.wait()
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] return self._exit_event.wait()
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] result = hub.switch()
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 570.436969] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] return self.greenlet.switch()
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] result = function(*args, **kwargs)
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] return func(*args, **kwargs)
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] raise e
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] nwinfo = self.network_api.allocate_for_instance(
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] created_port_ids = self._update_ports_for_instance(
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] with excutils.save_and_reraise_exception():
[ 570.437324] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 570.437728] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] self.force_reraise()
[ 570.437728] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 570.437728] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] raise self.value
[ 570.437728] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 570.437728] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] updated_port = self._update_port(
[ 570.437728] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 570.437728] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] _ensure_no_port_binding_failure(port)
[ 570.437728] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 570.437728] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] raise exception.PortBindingFailed(port_id=port['id'])
[ 570.437728] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] nova.exception.PortBindingFailed: Binding failed for port 4dcbe70e-1499-4982-bf91-78ea40b01f17, please check neutron logs for more information.
[ 570.437728] env[61998]: ERROR nova.compute.manager [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e]
[ 570.438088] env[61998]: DEBUG nova.compute.utils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Binding failed for port 4dcbe70e-1499-4982-bf91-78ea40b01f17, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 570.438575] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.374s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 570.441200] env[61998]: INFO nova.compute.claims [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 570.443696] env[61998]: DEBUG nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Build of instance f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e was re-scheduled: Binding failed for port 4dcbe70e-1499-4982-bf91-78ea40b01f17, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}}
[ 570.444132] env[61998]: DEBUG nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}}
[ 570.444350] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Acquiring lock "refresh_cache-f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 570.444493] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Acquired lock "refresh_cache-f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 570.444650] env[61998]: DEBUG nova.network.neutron [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 570.539050] env[61998]: DEBUG nova.compute.manager [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}}
[ 570.680367] env[61998]: INFO nova.compute.manager [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] [instance: d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f] Took 1.04 seconds to deallocate network for instance.
[ 570.973184] env[61998]: DEBUG nova.network.neutron [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.064218] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.194422] env[61998]: DEBUG nova.network.neutron [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.701591] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Releasing lock "refresh_cache-f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.702181] env[61998]: DEBUG nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 571.702472] env[61998]: DEBUG nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 571.702736] env[61998]: DEBUG nova.network.neutron [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 571.729395] env[61998]: INFO nova.scheduler.client.report [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Deleted allocations for instance d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f [ 571.735925] env[61998]: DEBUG nova.network.neutron [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.995225] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc3536e-6bb2-4fad-affd-3cdf74a99e39 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.011191] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bc540b-d691-444a-a74d-8f95f52250c4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.049868] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa89156-3975-43f2-83eb-ee6caf634df2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.058473] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d10af9d-064a-48eb-b5ff-a0c00a346811 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.073489] env[61998]: DEBUG nova.compute.provider_tree [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 572.239120] env[61998]: DEBUG nova.network.neutron [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.241846] env[61998]: DEBUG oslo_concurrency.lockutils [None req-17d52734-ff3f-458b-9c1d-259ee3d4f1e5 tempest-ImagesOneServerNegativeTestJSON-998658280 tempest-ImagesOneServerNegativeTestJSON-998658280-project-member] Lock "d2e4c9d6-7dec-4a54-bc26-84cfbdab6d8f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.291s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.579101] env[61998]: DEBUG nova.scheduler.client.report [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 572.744109] env[61998]: INFO nova.compute.manager [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] [instance: f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e] Took 1.04 seconds to deallocate network for instance. 
[ 572.746716] env[61998]: DEBUG nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 573.086708] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.647s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.087340] env[61998]: DEBUG nova.compute.manager [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 573.094338] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.347s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.273809] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.604068] env[61998]: DEBUG nova.compute.utils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 573.606305] env[61998]: DEBUG nova.compute.manager [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Not allocating networking since 'none' was specified. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 573.787955] env[61998]: INFO nova.scheduler.client.report [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Deleted allocations for instance f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e [ 574.070643] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d06b17-36e8-4530-bf37-508c404c7423 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.078713] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114bf864-3383-4545-ae44-fcd2636e5196 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.112307] env[61998]: DEBUG nova.compute.manager [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 574.113705] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de4274c-1c24-4e06-aabd-9cc0b7406096 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.124683] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4589e3f3-2c32-49ab-b546-a1bb397f8fd7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.137971] env[61998]: DEBUG nova.compute.provider_tree [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 574.300629] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6dccb2d3-b3e9-4fb8-a351-401ae726b156 tempest-ServerDiagnosticsTest-125119359 tempest-ServerDiagnosticsTest-125119359-project-member] Lock "f9d92b1b-59ed-4f3c-9e5c-2b894b123e9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.253s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.466978] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Acquiring lock "35b6490b-eec9-4dc1-9de3-63c368bdc5d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.467237] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Lock "35b6490b-eec9-4dc1-9de3-63c368bdc5d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.640667] env[61998]: DEBUG nova.scheduler.client.report [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 574.809618] env[61998]: DEBUG nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 575.128209] env[61998]: DEBUG nova.compute.manager [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 575.145192] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.051s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.146663] env[61998]: ERROR nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ab247aa6-38cc-4ff4-bdeb-351ba048048e, please check neutron logs for more information. 
[ 575.146663] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Traceback (most recent call last): [ 575.146663] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 575.146663] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] self.driver.spawn(context, instance, image_meta, [ 575.146663] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 575.146663] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 575.146663] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 575.146663] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] vm_ref = self.build_virtual_machine(instance, [ 575.146663] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 575.146663] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] vif_infos = vmwarevif.get_vif_info(self._session, [ 575.146663] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] for vif in network_info: [ 575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] return self._sync_wrapper(fn, *args, **kwargs) [ 575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] self.wait() [ 575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] self[:] = self._gt.wait() [ 575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] return self._exit_event.wait() [ 575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] result = hub.switch() [ 575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
575.147129] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] return self.greenlet.switch() [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] result = function(*args, **kwargs) [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] return func(*args, **kwargs) [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] raise e [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] nwinfo = self.network_api.allocate_for_instance( [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] created_port_ids = self._update_ports_for_instance( [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] with excutils.save_and_reraise_exception(): [ 575.147551] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 575.150046] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] self.force_reraise() [ 575.150046] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 575.150046] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] raise self.value [ 575.150046] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 575.150046] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] updated_port = self._update_port( [ 575.150046] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 575.150046] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] _ensure_no_port_binding_failure(port) [ 575.150046] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 575.150046] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] raise exception.PortBindingFailed(port_id=port['id']) [ 575.150046] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] nova.exception.PortBindingFailed: Binding failed for port ab247aa6-38cc-4ff4-bdeb-351ba048048e, please check neutron logs for more information. [ 575.150046] env[61998]: ERROR nova.compute.manager [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] [ 575.151092] env[61998]: DEBUG nova.compute.utils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Binding failed for port ab247aa6-38cc-4ff4-bdeb-351ba048048e, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 575.151092] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.359s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.155829] env[61998]: DEBUG nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Build of instance 782f1eba-459c-4249-b74a-128b22c64ca3 was re-scheduled: Binding failed for port ab247aa6-38cc-4ff4-bdeb-351ba048048e, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 575.156348] env[61998]: DEBUG nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 575.156610] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Acquiring lock "refresh_cache-782f1eba-459c-4249-b74a-128b22c64ca3" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.156761] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Acquired lock "refresh_cache-782f1eba-459c-4249-b74a-128b22c64ca3" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.156919] env[61998]: DEBUG nova.network.neutron [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 575.168679] env[61998]: DEBUG nova.virt.hardware [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 575.168923] env[61998]: DEBUG nova.virt.hardware [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 575.169094] env[61998]: DEBUG nova.virt.hardware [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 575.169279] env[61998]: DEBUG nova.virt.hardware [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Flavor pref 0:0:0 {{(pid=61998) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 575.169422] env[61998]: DEBUG nova.virt.hardware [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 575.169568] env[61998]: DEBUG nova.virt.hardware [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 575.169776] env[61998]: DEBUG nova.virt.hardware [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 575.169931] env[61998]: DEBUG nova.virt.hardware [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 575.170323] env[61998]: DEBUG nova.virt.hardware [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 575.170323] env[61998]: DEBUG nova.virt.hardware [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 575.170445] env[61998]: DEBUG nova.virt.hardware [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 575.171474] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200d9a86-26f8-4397-9f22-1f1c822a9070 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.180495] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cacf89b-ce0f-46cb-b214-2e581fea0a51 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.196060] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Instance VIF info [] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 575.204713] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 
tempest-ServersAdmin275Test-1350323747-project-member] Creating folder: Project (f41db78d015a4fe3a630a95f941c66ef). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 575.204713] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ba94e94-cbd5-49d1-b5a5-d157bf9b6c1e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.213825] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Created folder: Project (f41db78d015a4fe3a630a95f941c66ef) in parent group-v294665. [ 575.214064] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Creating folder: Instances. Parent ref: group-v294675. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 575.214289] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7eb2f727-c517-4012-a819-edc5b80461c1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.222613] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Created folder: Instances in parent group-v294675. [ 575.222839] env[61998]: DEBUG oslo.service.loopingcall [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 575.223027] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 575.225604] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c2e4f06-a60c-4086-8efe-5222bbbf0d24 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.240837] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 575.240837] env[61998]: value = "task-1388379" [ 575.240837] env[61998]: _type = "Task" [ 575.240837] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.249108] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388379, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.345270] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.694471] env[61998]: DEBUG nova.network.neutron [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 575.753663] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388379, 'name': CreateVM_Task, 'duration_secs': 0.394205} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.753910] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 575.757895] env[61998]: DEBUG oslo_vmware.service [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c28abc-268c-4646-9204-dfbd66a74cba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.767029] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 575.767569] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.769306] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 575.769306] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1ee29eb-117a-4205-bfa7-c306979add30 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.774656] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 575.774656] env[61998]: value = 
"session[523a9ed6-b255-d82a-34e3-504b542807f6]5229b2cc-9925-14fb-2a78-0c645d113899" [ 575.774656] env[61998]: _type = "Task" [ 575.774656] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.787160] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5229b2cc-9925-14fb-2a78-0c645d113899, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.826908] env[61998]: DEBUG nova.network.neutron [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.151982] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220bc676-2feb-45a4-acb9-08aa38526249 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.159829] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c67da49-f6cb-4d32-961c-e31ba93d3147 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.192563] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a55bfa5-c26f-44b2-89f8-4e1c3bcad0e8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.200489] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4bae95-a609-4f3e-9ed4-d28ab79afe3d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.214323] env[61998]: DEBUG nova.compute.provider_tree [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 576.285126] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5229b2cc-9925-14fb-2a78-0c645d113899, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.330318] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Releasing lock "refresh_cache-782f1eba-459c-4249-b74a-128b22c64ca3" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.330621] env[61998]: DEBUG nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 576.330814] env[61998]: DEBUG nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 576.330979] env[61998]: DEBUG nova.network.neutron [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 576.372924] env[61998]: DEBUG nova.network.neutron [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.718111] env[61998]: DEBUG nova.scheduler.client.report [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 576.792027] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.792027] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 576.792027] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.792027] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.792407] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 576.792407] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6af3a9e-0cb1-43c9-b463-a5d6c7d67384 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.810240] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 576.810240] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None 
req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 576.810240] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31280663-acd1-4dee-8ce9-1f419ce8dab6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.817843] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-162b1676-c645-41f3-b6e5-d2f59b0846f2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.826028] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 576.826028] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52cf96b0-1c8d-8b6a-0d7d-820015657547" [ 576.826028] env[61998]: _type = "Task" [ 576.826028] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.832110] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52cf96b0-1c8d-8b6a-0d7d-820015657547, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.877771] env[61998]: DEBUG nova.network.neutron [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.228505] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.079s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.229985] env[61998]: ERROR nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 583592a8-3db1-4ec9-9a9f-79479d91196d, please check neutron logs for more information. 
[ 577.229985] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Traceback (most recent call last): [ 577.229985] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 577.229985] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] self.driver.spawn(context, instance, image_meta, [ 577.229985] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 577.229985] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 577.229985] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 577.229985] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] vm_ref = self.build_virtual_machine(instance, [ 577.229985] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 577.229985] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] vif_infos = vmwarevif.get_vif_info(self._session, [ 577.229985] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] for vif in network_info: [ 577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] return self._sync_wrapper(fn, *args, **kwargs) [ 577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] self.wait() [ 577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] self[:] = self._gt.wait() [ 577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] return self._exit_event.wait() [ 577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] result = hub.switch() [ 577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
577.231341] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] return self.greenlet.switch() [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] result = function(*args, **kwargs) [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] return func(*args, **kwargs) [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] raise e [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] nwinfo = self.network_api.allocate_for_instance( [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] created_port_ids = self._update_ports_for_instance( [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] with excutils.save_and_reraise_exception(): [ 577.231706] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.232116] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] self.force_reraise() [ 577.232116] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.232116] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] raise self.value [ 577.232116] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 577.232116] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] updated_port = self._update_port( [ 577.232116] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.232116] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] _ensure_no_port_binding_failure(port) [ 577.232116] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 577.232116] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] raise exception.PortBindingFailed(port_id=port['id']) [ 577.232116] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] nova.exception.PortBindingFailed: Binding failed for port 583592a8-3db1-4ec9-9a9f-79479d91196d, please check neutron logs for more information. [ 577.232116] env[61998]: ERROR nova.compute.manager [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] [ 577.233870] env[61998]: DEBUG nova.compute.utils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Binding failed for port 583592a8-3db1-4ec9-9a9f-79479d91196d, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 577.233870] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.716s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.235937] env[61998]: INFO nova.compute.claims [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 577.237864] env[61998]: DEBUG nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Build of instance ac0209e5-66d0-4a04-892d-85eba3c3663a was re-scheduled: Binding failed for port 583592a8-3db1-4ec9-9a9f-79479d91196d, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 577.239271] env[61998]: DEBUG nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 577.239271] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Acquiring lock "refresh_cache-ac0209e5-66d0-4a04-892d-85eba3c3663a" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.239271] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Acquired lock "refresh_cache-ac0209e5-66d0-4a04-892d-85eba3c3663a" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.239271] env[61998]: DEBUG nova.network.neutron [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 577.338025] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Preparing fetch location {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 577.338025] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Creating directory with path [datastore1] vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 577.338025] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51a72430-4686-4897-bd2a-dad8aacd7e29 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.360456] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Created directory with path [datastore1] vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 577.363565] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Fetch image to [datastore1] vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk {{(pid=61998) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 577.363565] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Downloading image file data a90c4a31-8bcc-48cf-ada7-7369ab14c460 to [datastore1] vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk on the data store datastore1 {{(pid=61998) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 577.363565] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35d8fa2-88bf-4adc-8fa6-f48de353394d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.372922] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfe61b8-ee04-4a7c-aea9-846c0f7fea00 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.390688] env[61998]: INFO nova.compute.manager [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] [instance: 782f1eba-459c-4249-b74a-128b22c64ca3] Took 1.06 seconds to deallocate network for instance. [ 577.395263] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984100bb-c685-43b2-bceb-6abdb5b6bc7b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.438786] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62bbb247-2b93-49fa-a21a-c89cac6cf4e1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.448230] env[61998]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cb99c6b1-ab8b-40f1-bde0-d0b425505da9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.472274] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Downloading image file data a90c4a31-8bcc-48cf-ada7-7369ab14c460 to the data store datastore1 {{(pid=61998) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 577.556925] env[61998]: DEBUG oslo_vmware.rw_handles [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61998) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 577.629551] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquiring lock "9da95edb-f9fb-40f3-9317-d27f1bae0ecf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.629726] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Lock "9da95edb-f9fb-40f3-9317-d27f1bae0ecf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.791836] env[61998]: DEBUG nova.network.neutron [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.068021] env[61998]: DEBUG nova.network.neutron [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.322634] env[61998]: DEBUG oslo_vmware.rw_handles [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Completed reading data from the image iterator. {{(pid=61998) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 578.323101] env[61998]: DEBUG oslo_vmware.rw_handles [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61998) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 578.391034] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Downloaded image file data a90c4a31-8bcc-48cf-ada7-7369ab14c460 to vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk on the data store datastore1 {{(pid=61998) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 578.395873] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Caching image {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 578.397708] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Copying Virtual Disk [datastore1] vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk to [datastore1] vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 578.397708] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91f2513c-d2fa-4690-9349-80c8db56e917 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.412835] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 578.412835] env[61998]: value = "task-1388382" [ 578.412835] env[61998]: _type = "Task" [ 578.412835] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.431245] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388382, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.446949] env[61998]: INFO nova.scheduler.client.report [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Deleted allocations for instance 782f1eba-459c-4249-b74a-128b22c64ca3 [ 578.569272] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Releasing lock "refresh_cache-ac0209e5-66d0-4a04-892d-85eba3c3663a" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.569523] env[61998]: DEBUG nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 578.569708] env[61998]: DEBUG nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 578.569893] env[61998]: DEBUG nova.network.neutron [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 578.598709] env[61998]: DEBUG nova.network.neutron [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.829099] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088c9cff-595f-41c0-ae8f-4df906b37ada {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.837524] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ddaef6-d88a-436a-beaf-8eb23ee18d9b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.876482] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11665aa2-99d2-4303-8555-ffb5ee3723a1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.885242] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33397157-50b9-40a5-b560-f272be8a338c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.901133] env[61998]: DEBUG nova.compute.provider_tree [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.923259] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388382, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.964856] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b2a61d9c-3460-4ce7-983e-d48ba0c76521 tempest-VolumesAssistedSnapshotsTest-1080540270 tempest-VolumesAssistedSnapshotsTest-1080540270-project-member] Lock "782f1eba-459c-4249-b74a-128b22c64ca3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.281s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.102271] env[61998]: DEBUG nova.network.neutron [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.407116] env[61998]: DEBUG nova.scheduler.client.report [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 579.432210] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388382, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.912694} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.433743] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Copied Virtual Disk [datastore1] vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk to [datastore1] vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 579.435748] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Deleting the datastore file [datastore1] vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460/tmp-sparse.vmdk {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 579.435748] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a6e296f-4c3d-48c4-ad6a-b952134140f4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.443675] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 579.443675] env[61998]: value = "task-1388384" [ 579.443675] env[61998]: _type = "Task" [ 579.443675] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.452327] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388384, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.468281] env[61998]: DEBUG nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 579.608849] env[61998]: INFO nova.compute.manager [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] [instance: ac0209e5-66d0-4a04-892d-85eba3c3663a] Took 1.04 seconds to deallocate network for instance. 
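From 577.33 onward the ServersAdmin275Test request walks the cache-miss path of _fetch_image_if_missing (nova/virt/vmwareapi/vmops.py): make a vmware_temp directory, stream the 21318656-byte Glance image to tmp-sparse.vmdk through the datastore HTTP write handle, CopyVirtualDisk the sparse file to a flat vmdk, delete the sparse original, and, in the entries that follow, move the result into devstack-image-cache_base and clean up the temp location. A toy sequencing sketch, with every helper reduced to a print of the vSphere task it stands for (helper names and the temp-path layout are illustrative, not Nova's):

    def _step(msg):
        # Stand-in for issuing a vSphere task and waiting for it to complete.
        print(msg)

    def fetch_image_if_missing(datastore, image_id):
        base = f"[{datastore}] vmware_temp/{image_id}"
        cache = f"[{datastore}] devstack-image-cache_base/{image_id}"
        sparse = f"{base}/tmp-sparse.vmdk"
        flat = f"{base}/{image_id}.vmdk"
        _step(f"MakeDirectory {base}")                      # ds_util.mkdir
        _step(f"HTTP write -> {sparse}")                    # rw_handles upload from Glance
        _step(f"CopyVirtualDisk_Task {sparse} -> {flat}")   # sparse-to-flat conversion
        _step(f"DeleteDatastoreFile_Task {sparse}")         # drop the temp sparse copy
        _step(f"MoveDatastoreFile_Task {flat} -> {cache}")  # publish into the image cache
        _step(f"DeleteDatastoreFile_Task {base}")           # clean up the temp location
        return f"{cache}/{image_id}.vmdk"

    fetch_image_if_missing("datastore1", "a90c4a31-8bcc-48cf-ada7-7369ab14c460")

Once the cached vmdk exists, later builds copy straight out of devstack-image-cache_base, as the CopyVirtualDisk_Task at 581.57 below does for instance e4ada227.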
[ 579.912328] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.679s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.913103] env[61998]: DEBUG nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 579.917173] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.710s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.917173] env[61998]: DEBUG nova.objects.instance [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Lazy-loading 'resources' on Instance uuid 2aabbd53-4c4d-4b53-8135-34cc5a17fd47 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 579.956437] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.04552} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.956624] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 579.957695] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Moving file from [datastore1] vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8/a90c4a31-8bcc-48cf-ada7-7369ab14c460 to [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460. {{(pid=61998) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 579.957695] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-43db38b7-f850-4c4b-b6de-c98cc0f0db3a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.964991] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 579.964991] env[61998]: value = "task-1388385" [ 579.964991] env[61998]: _type = "Task" [ 579.964991] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.976724] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388385, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.004392] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.419550] env[61998]: DEBUG nova.compute.utils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 580.424504] env[61998]: DEBUG nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 580.424504] env[61998]: DEBUG nova.network.neutron [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 580.478586] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388385, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.028223} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.482786] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] File moved {{(pid=61998) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 580.483034] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Cleaning up location [datastore1] vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 580.483190] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Deleting the datastore file [datastore1] vmware_temp/35c6aa14-1a78-46e5-a668-27fdb2b59ea8 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 580.483642] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7c65184-87d9-4523-9295-aa3b16d11b97 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.491421] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 580.491421] env[61998]: value = "task-1388387" [ 580.491421] env[61998]: _type = "Task" [ 580.491421] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.499869] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388387, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.582516] env[61998]: DEBUG nova.policy [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25919d91b6fe4f31a85b4109149e261b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df09ba4531ae4b1e8e83f9b382b82c5c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 580.654346] env[61998]: INFO nova.scheduler.client.report [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Deleted allocations for instance ac0209e5-66d0-4a04-892d-85eba3c3663a [ 580.923938] env[61998]: DEBUG nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 580.959603] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ab21f4-a86e-46ef-80b9-3ed9fd56e33b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.971326] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ded3c75-a161-4033-a638-ed7f40f4c46e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.017316] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c999b174-8780-4c01-b19b-6f1310d567f8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.029692] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1c960a-d75f-46e0-9e73-eed07bbfe54f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.033804] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388387, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024928} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.034663] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 581.035764] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42973af6-fb90-4d37-9af6-776de7ddade4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.046478] env[61998]: DEBUG nova.compute.provider_tree [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.053765] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Acquiring lock "a8101e8d-55d0-4f70-9119-f5e176ba8212" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.053765] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Lock "a8101e8d-55d0-4f70-9119-f5e176ba8212" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.053765] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 581.053765] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5268a1f8-d709-d3c2-bab5-65a16a44e69b" [ 581.053765] env[61998]: _type = "Task" [ 581.053765] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.061962] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5268a1f8-d709-d3c2-bab5-65a16a44e69b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.169307] env[61998]: DEBUG oslo_concurrency.lockutils [None req-527372e2-5791-4d24-b66b-c50eb05e638f tempest-FloatingIPsAssociationNegativeTestJSON-1894237132 tempest-FloatingIPsAssociationNegativeTestJSON-1894237132-project-member] Lock "ac0209e5-66d0-4a04-892d-85eba3c3663a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.871s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.236920] env[61998]: DEBUG nova.network.neutron [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Successfully created port: b60cebfb-028c-4c99-b037-df42b62daac9 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 581.351795] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Acquiring lock "f0a011bb-4939-4384-885c-6ce482875b4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.352714] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Lock "f0a011bb-4939-4384-885c-6ce482875b4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.553179] env[61998]: DEBUG nova.scheduler.client.report [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 581.566885] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5268a1f8-d709-d3c2-bab5-65a16a44e69b, 'name': SearchDatastore_Task, 'duration_secs': 0.018578} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.567510] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.567510] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] e4ada227-b79a-457a-b063-dde99840aa14/e4ada227-b79a-457a-b063-dde99840aa14.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 581.567510] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50d9bc8e-942a-44f6-8ca0-e587a3590285 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.577891] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 581.577891] env[61998]: value = "task-1388389" [ 581.577891] env[61998]: _type = "Task" [ 581.577891] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.593464] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388389, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.672513] env[61998]: DEBUG nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 581.945242] env[61998]: DEBUG nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 581.979843] env[61998]: DEBUG nova.virt.hardware [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 581.980515] env[61998]: DEBUG nova.virt.hardware [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 581.980862] env[61998]: DEBUG nova.virt.hardware [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 581.981209] env[61998]: DEBUG nova.virt.hardware [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 581.981467] env[61998]: DEBUG nova.virt.hardware [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 581.981744] env[61998]: DEBUG nova.virt.hardware [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 581.982166] env[61998]: DEBUG nova.virt.hardware [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 581.982612] env[61998]: DEBUG nova.virt.hardware [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 581.982942] env[61998]: DEBUG 
nova.virt.hardware [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 581.984683] env[61998]: DEBUG nova.virt.hardware [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 581.984683] env[61998]: DEBUG nova.virt.hardware [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 581.985800] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d18bbea-4687-4bf4-b31e-b9ff579acdfe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.002025] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da69f24d-1010-4ccc-b9e3-6270a3cb2ae8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.060475] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.144s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.063636] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.152s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.092022] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388389, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.102838] env[61998]: INFO nova.scheduler.client.report [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Deleted allocations for instance 2aabbd53-4c4d-4b53-8135-34cc5a17fd47 [ 582.197765] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.599060] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388389, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597571} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.599683] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] e4ada227-b79a-457a-b063-dde99840aa14/e4ada227-b79a-457a-b063-dde99840aa14.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 582.599683] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 582.599806] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d87641a1-1c0a-4b99-95f9-0851e8bcad08 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.607214] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 582.607214] env[61998]: value = "task-1388391" [ 582.607214] env[61998]: _type = "Task" [ 582.607214] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.618710] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa15a990-b57e-41bf-b1cf-134049012994 tempest-ServerDiagnosticsV248Test-1512693895 tempest-ServerDiagnosticsV248Test-1512693895-project-member] Lock "2aabbd53-4c4d-4b53-8135-34cc5a17fd47" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.195s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.623363] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388391, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.088918] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17276315-fd41-499e-a0a4-763dac737bd7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.103320] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62bf0b1-6e53-4b98-907b-fd93594a4951 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.147519] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873a94de-6faf-4f8e-a5c0-d00305d35af6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.152211] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388391, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066768} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.152893] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 583.153778] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55f812f-070d-41c6-a4eb-bdbf7bd981b9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.160970] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ee585f-a31d-476e-9f1b-ed35a1de5634 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.183282] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] e4ada227-b79a-457a-b063-dde99840aa14/e4ada227-b79a-457a-b063-dde99840aa14.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 583.183282] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17b58ee1-f94d-4ee2-abf8-57c7c9c4ea7b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.205985] env[61998]: DEBUG nova.compute.provider_tree [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.213184] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 
tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 583.213184] env[61998]: value = "task-1388392" [ 583.213184] env[61998]: _type = "Task" [ 583.213184] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.230859] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388392, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.576415] env[61998]: ERROR nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b60cebfb-028c-4c99-b037-df42b62daac9, please check neutron logs for more information. [ 583.576415] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 583.576415] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 583.576415] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 583.576415] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.576415] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 583.576415] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.576415] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 583.576415] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.576415] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 583.576415] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.576415] env[61998]: ERROR nova.compute.manager raise self.value [ 583.576415] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.576415] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 583.576415] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.576415] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 583.576991] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.576991] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 583.576991] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b60cebfb-028c-4c99-b037-df42b62daac9, please check neutron logs for more information. 
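The burst above interleaves several long-running vCenter operations (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task), each with the same shape: an `Invoking <ManagedObject>.<Method>_Task` call, a `Waiting for the task: (returnval){ value = "task-..." _type = "Task" }` block holding the task's managed object reference, and `_poll_task` progress lines until `completed successfully`. A minimal sketch of that pattern through oslo.vmware, with placeholder host, credentials, and datastore path (the ExtendVirtualDisk arguments mirror the 1048576 KB extension logged above):

```python
# Sketch only: placeholder endpoint/credentials/paths, not this deployment's.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',        # placeholders
    api_retry_count=10, task_poll_interval=0.5)

disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
    name='[datastore1] example/example.vmdk',   # placeholder datastore path
    datacenter=None,            # placeholder; real callers pass a Datacenter ref
    newCapacityKb=1048576,      # matches "Extending root virtual disk to 1048576"
    eagerZero=False)
session.wait_for_task(task)    # blocks, emitting poll lines like those above
```

The `(returnval){ value = "task-1388391" _type = "Task" }` blocks in the log are exactly the task references that `wait_for_task()` polls via the property collector until the task succeeds or raises.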
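The PortBindingFailed traceback that closes the entries above (and repeats below, once for the spawning greenthread and once per instance) runs through two standard patterns: the binding check at nova/network/neutron.py:294, and oslo.utils' save_and_reraise_exception(), whose __exit__ / force_reraise frames bracket cleanup before the original exception is re-raised. A condensed, self-contained paraphrase (not Nova's literal code):

```python
from oslo_utils import excutils


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""


def ensure_no_port_binding_failure(port):
    # Neutron reports a failed binding via the port's binding:vif_type field.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed('Binding failed for port %s' % port['id'])


def update_ports(ports):
    created = []
    for port in ports:
        try:
            ensure_no_port_binding_failure(port)
            created.append(port['id'])
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; on leaving the context manager,
                # force_reraise() restores the original exception unchanged.
                created.clear()
    return created
```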
[ 583.576991] env[61998]: ERROR nova.compute.manager [ 583.576991] env[61998]: Traceback (most recent call last): [ 583.576991] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 583.576991] env[61998]: listener.cb(fileno) [ 583.576991] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.576991] env[61998]: result = function(*args, **kwargs) [ 583.576991] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 583.576991] env[61998]: return func(*args, **kwargs) [ 583.576991] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 583.576991] env[61998]: raise e [ 583.576991] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 583.576991] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 583.576991] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.576991] env[61998]: created_port_ids = self._update_ports_for_instance( [ 583.576991] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.576991] env[61998]: with excutils.save_and_reraise_exception(): [ 583.576991] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.576991] env[61998]: self.force_reraise() [ 583.576991] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.576991] env[61998]: raise self.value [ 583.576991] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.576991] env[61998]: updated_port = self._update_port( [ 583.576991] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.576991] env[61998]: _ensure_no_port_binding_failure(port) [ 583.576991] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.576991] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 583.577977] env[61998]: nova.exception.PortBindingFailed: Binding failed for port b60cebfb-028c-4c99-b037-df42b62daac9, please check neutron logs for more information. [ 583.577977] env[61998]: Removing descriptor: 15 [ 583.577977] env[61998]: ERROR nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b60cebfb-028c-4c99-b037-df42b62daac9, please check neutron logs for more information. 
[ 583.577977] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Traceback (most recent call last): [ 583.577977] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 583.577977] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] yield resources [ 583.577977] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 583.577977] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] self.driver.spawn(context, instance, image_meta, [ 583.577977] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 583.577977] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 583.577977] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 583.577977] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] vm_ref = self.build_virtual_machine(instance, [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] vif_infos = vmwarevif.get_vif_info(self._session, [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] for vif in network_info: [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] return self._sync_wrapper(fn, *args, **kwargs) [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] self.wait() [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] self[:] = self._gt.wait() [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] return self._exit_event.wait() [ 583.578384] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 583.578808] env[61998]: ERROR 
nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] result = hub.switch() [ 583.578808] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 583.578808] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] return self.greenlet.switch() [ 583.578808] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.578808] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] result = function(*args, **kwargs) [ 583.578808] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 583.578808] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] return func(*args, **kwargs) [ 583.578808] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 583.578808] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] raise e [ 583.578808] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 583.578808] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] nwinfo = self.network_api.allocate_for_instance( [ 583.578808] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.578808] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] created_port_ids = self._update_ports_for_instance( [ 583.579232] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.579232] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] with excutils.save_and_reraise_exception(): [ 583.579232] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.579232] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] self.force_reraise() [ 583.579232] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.579232] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] raise self.value [ 583.579232] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.579232] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] updated_port = self._update_port( [ 583.579232] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.579232] 
env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] _ensure_no_port_binding_failure(port) [ 583.579232] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.579232] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] raise exception.PortBindingFailed(port_id=port['id']) [ 583.579621] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] nova.exception.PortBindingFailed: Binding failed for port b60cebfb-028c-4c99-b037-df42b62daac9, please check neutron logs for more information. [ 583.579621] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] [ 583.579621] env[61998]: INFO nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Terminating instance [ 583.581028] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "refresh_cache-ef129347-9ea0-4615-b897-f51e664da1a7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.581191] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquired lock "refresh_cache-ef129347-9ea0-4615-b897-f51e664da1a7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.581357] env[61998]: DEBUG nova.network.neutron [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 583.607781] env[61998]: DEBUG nova.compute.manager [req-c450fbd5-d09c-41a4-ae95-92f0698fce09 req-2a13bbb9-83e2-4110-97c8-2bf90cb0069e service nova] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Received event network-changed-b60cebfb-028c-4c99-b037-df42b62daac9 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 583.608087] env[61998]: DEBUG nova.compute.manager [req-c450fbd5-d09c-41a4-ae95-92f0698fce09 req-2a13bbb9-83e2-4110-97c8-2bf90cb0069e service nova] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Refreshing instance network info cache due to event network-changed-b60cebfb-028c-4c99-b037-df42b62daac9. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 583.608087] env[61998]: DEBUG oslo_concurrency.lockutils [req-c450fbd5-d09c-41a4-ae95-92f0698fce09 req-2a13bbb9-83e2-4110-97c8-2bf90cb0069e service nova] Acquiring lock "refresh_cache-ef129347-9ea0-4615-b897-f51e664da1a7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.709495] env[61998]: DEBUG nova.scheduler.client.report [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 583.725039] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388392, 'name': ReconfigVM_Task, 'duration_secs': 0.30014} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.725513] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Reconfigured VM instance instance-0000000d to attach disk [datastore1] e4ada227-b79a-457a-b063-dde99840aa14/e4ada227-b79a-457a-b063-dde99840aa14.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 583.726650] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a64d27c-ff99-4b16-896c-71727cd4ec04 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.736326] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 583.736326] env[61998]: value = "task-1388394" [ 583.736326] env[61998]: _type = "Task" [ 583.736326] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.754626] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388394, 'name': Rename_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.113920] env[61998]: DEBUG nova.network.neutron [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.217214] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.154s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.218233] env[61998]: ERROR nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3, please check neutron logs for more information. [ 584.218233] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Traceback (most recent call last): [ 584.218233] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 584.218233] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] self.driver.spawn(context, instance, image_meta, [ 584.218233] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 584.218233] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 584.218233] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 584.218233] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] vm_ref = self.build_virtual_machine(instance, [ 584.218233] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 584.218233] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] vif_infos = vmwarevif.get_vif_info(self._session, [ 584.218233] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] for vif in network_info: [ 584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] return self._sync_wrapper(fn, *args, **kwargs) [ 584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] self.wait() [ 584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 
584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] self[:] = self._gt.wait() [ 584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] return self._exit_event.wait() [ 584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] result = hub.switch() [ 584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 584.218616] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] return self.greenlet.switch() [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] result = function(*args, **kwargs) [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] return func(*args, **kwargs) [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] raise e [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] nwinfo = self.network_api.allocate_for_instance( [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] created_port_ids = self._update_ports_for_instance( [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] with excutils.save_and_reraise_exception(): [ 584.222079] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.222475] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] self.force_reraise() [ 584.222475] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.222475] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] raise self.value [ 584.222475] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 584.222475] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] updated_port = self._update_port( [ 584.222475] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.222475] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] _ensure_no_port_binding_failure(port) [ 584.222475] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.222475] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] raise exception.PortBindingFailed(port_id=port['id']) [ 584.222475] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] nova.exception.PortBindingFailed: Binding failed for port 3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3, please check neutron logs for more information. [ 584.222475] env[61998]: ERROR nova.compute.manager [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] [ 584.222904] env[61998]: DEBUG nova.compute.utils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Binding failed for port 3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 584.222904] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.517s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.222904] env[61998]: INFO nova.compute.claims [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 584.226045] env[61998]: DEBUG nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Build of instance 0fd8d146-79fe-4e2a-90a3-67d457fc570f was re-scheduled: Binding failed for port 3f00a2b2-a1fd-4dfc-831c-ca4b575c65d3, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 584.226824] env[61998]: DEBUG nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 584.227112] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Acquiring lock "refresh_cache-0fd8d146-79fe-4e2a-90a3-67d457fc570f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.227301] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Acquired lock "refresh_cache-0fd8d146-79fe-4e2a-90a3-67d457fc570f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.227506] env[61998]: DEBUG nova.network.neutron [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 584.248046] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388394, 'name': Rename_Task, 'duration_secs': 0.146975} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.248799] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 584.250824] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-417214a0-fc66-4c2e-a3ba-d28f7d1df89c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.258346] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 584.258346] env[61998]: value = "task-1388395" [ 584.258346] env[61998]: _type = "Task" [ 584.258346] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.275509] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388395, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.358781] env[61998]: DEBUG nova.network.neutron [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.481484] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Acquiring lock "4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.481952] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Lock "4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.765674] env[61998]: DEBUG nova.network.neutron [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.774462] env[61998]: DEBUG oslo_vmware.api [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388395, 'name': PowerOnVM_Task, 'duration_secs': 0.428406} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.774893] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 584.774893] env[61998]: INFO nova.compute.manager [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Took 9.65 seconds to spawn the instance on the hypervisor. 
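Both copies of the traceback also show how the failure crosses thread boundaries: _allocate_network_async runs in an eventlet greenthread, and the GreenThread.wait -> Event.wait -> hub.switch frames are the caller blocking on it from network_info's _sync_wrapper. An exception raised inside the greenthread is re-raised in the waiter, which is why the same PortBindingFailed appears in both stacks. A trivial, self-contained illustration of that spawn/wait behaviour:

```python
import eventlet

def allocate_network(port_id):
    eventlet.sleep(0)      # cooperative yield, like the hub.switch frames above
    raise RuntimeError('Binding failed for port %s' % port_id)

gt = eventlet.spawn(allocate_network, 'b60cebfb')   # background allocation
try:
    gt.wait()              # blocks; re-raises the greenthread's exception here
except RuntimeError as exc:
    print(exc)
```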
[ 584.775259] env[61998]: DEBUG nova.compute.manager [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 584.775853] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a634466c-58ab-4343-ad0c-ef5c5873802d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.864959] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Releasing lock "refresh_cache-ef129347-9ea0-4615-b897-f51e664da1a7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.865404] env[61998]: DEBUG nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 584.865601] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 584.865907] env[61998]: DEBUG oslo_concurrency.lockutils [req-c450fbd5-d09c-41a4-ae95-92f0698fce09 req-2a13bbb9-83e2-4110-97c8-2bf90cb0069e service nova] Acquired lock "refresh_cache-ef129347-9ea0-4615-b897-f51e664da1a7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.866080] env[61998]: DEBUG nova.network.neutron [req-c450fbd5-d09c-41a4-ae95-92f0698fce09 req-2a13bbb9-83e2-4110-97c8-2bf90cb0069e service nova] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Refreshing network info cache for port b60cebfb-028c-4c99-b037-df42b62daac9 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 584.867115] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61a1a446-fd4e-4279-b55c-7511385f0b65 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.880724] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a55eba2-f451-4b47-a298-2a8ceae3ab18 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.904746] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ef129347-9ea0-4615-b897-f51e664da1a7 could not be found. 
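The Acquiring/Acquired/Releasing lock lines here and throughout the log, with their `waited N.NNNs` / `held N.NNNs` timings, come from oslo.concurrency's lockutils. Two forms appear: a named process-wide lock such as "compute_resources", which serializes the resource tracker (hence waits like the 26.517s seen above when builds pile up), and per-instance locks such as "refresh_cache-<uuid>". An illustrative sketch of both; the function bodies and names are placeholders:

```python
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Serialized across the whole compute process; contention shows up in the
    # log as long "waited" times on this lock name.
    return 'claimed %s' % instance_uuid


def refresh_cache(instance_uuid, fetch_nw_info):
    # Context-manager form behind the per-instance refresh_cache-<uuid> lines.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return fetch_nw_info(instance_uuid)
```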
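The SearchIndex.FindAllByUuid invocation just above is how the driver tries to locate the backing VM for ef129347-... during destroy; an empty result produces the `Instance does not exist on backend: ... InstanceNotFound` warning, after which Nova simply proceeds to network deallocation. A sketch of that lookup, reusing the `session` object from the wait_for_task sketch earlier (argument names follow the vSphere SearchIndex API):

```python
search_index = session.vim.service_content.searchIndex
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid', search_index,
    uuid='ef129347-9ea0-4615-b897-f51e664da1a7',
    vmSearch=True,       # search virtual machines, not hosts
    instanceUuid=True)   # match vSphere instanceUuid, where Nova stores its UUID
if not vm_refs:
    print('Instance not found on backend')   # -> the InstanceNotFound warning
```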
[ 584.905025] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 584.905235] env[61998]: INFO nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 584.905447] env[61998]: DEBUG oslo.service.loopingcall [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 584.905967] env[61998]: DEBUG nova.compute.manager [-] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 584.906715] env[61998]: DEBUG nova.network.neutron [-] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 584.945566] env[61998]: DEBUG nova.network.neutron [-] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.971301] env[61998]: DEBUG nova.network.neutron [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.072120] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Acquiring lock "f163fb1b-400f-4abb-8df6-0d9ea6449166" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.072370] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Lock "f163fb1b-400f-4abb-8df6-0d9ea6449166" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.297089] env[61998]: INFO nova.compute.manager [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Took 36.26 seconds to build instance. 
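The oslo.service.loopingcall line above ("Waiting for function ... _deallocate_network_with_retries to return") is retry machinery around network deallocation: the wrapped function is invoked repeatedly until it signals completion. A minimal sketch with FixedIntervalLoopingCall; the variant and retry policy Nova actually configures here may differ:

```python
from oslo_service import loopingcall

attempts = {'n': 0}

def deallocate_with_retries():
    attempts['n'] += 1
    if attempts['n'] < 3:
        return                                        # loop again next interval
    raise loopingcall.LoopingCallDone('deallocated')  # stop the loop

timer = loopingcall.FixedIntervalLoopingCall(deallocate_with_retries)
result = timer.start(interval=0.1).wait()   # blocks; returns 'deallocated'
```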
[ 585.401584] env[61998]: DEBUG nova.network.neutron [req-c450fbd5-d09c-41a4-ae95-92f0698fce09 req-2a13bbb9-83e2-4110-97c8-2bf90cb0069e service nova] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 585.447164] env[61998]: DEBUG nova.network.neutron [-] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.474720] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Releasing lock "refresh_cache-0fd8d146-79fe-4e2a-90a3-67d457fc570f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.475045] env[61998]: DEBUG nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 585.475271] env[61998]: DEBUG nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 585.477345] env[61998]: DEBUG nova.network.neutron [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 585.502926] env[61998]: DEBUG nova.network.neutron [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 585.560211] env[61998]: DEBUG nova.network.neutron [req-c450fbd5-d09c-41a4-ae95-92f0698fce09 req-2a13bbb9-83e2-4110-97c8-2bf90cb0069e service nova] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.728048] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331376a4-a761-4d22-8465-2e93500b4725 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.735786] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4d6312-4608-4835-86c1-a833c3db1012 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.772542] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b2bf44-81c8-479e-926c-fb0faaf28ee3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.776576] env[61998]: DEBUG nova.compute.manager [req-5936f724-5ff1-4483-bacc-b66491246e32 req-336c1405-af30-4fb8-82b2-0a9bd174df5c service nova] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Received event network-vif-deleted-b60cebfb-028c-4c99-b037-df42b62daac9 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 585.783361] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846b59c4-8023-4406-9937-9eaa813013bf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.800198] env[61998]: DEBUG nova.compute.provider_tree [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.801862] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0786aeed-a562-4a68-8550-7a4c94fd8f9a tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lock "e4ada227-b79a-457a-b063-dde99840aa14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.803s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.866458] env[61998]: INFO nova.compute.manager [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Rebuilding instance [ 585.933647] env[61998]: DEBUG nova.compute.manager [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 585.934584] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e49519-24c3-4ce0-93f1-e0d95be22f2b {{(pid=61998) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.955196] env[61998]: INFO nova.compute.manager [-] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Took 1.05 seconds to deallocate network for instance. [ 585.957167] env[61998]: DEBUG nova.compute.claims [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 585.957360] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.006095] env[61998]: DEBUG nova.network.neutron [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.063857] env[61998]: DEBUG oslo_concurrency.lockutils [req-c450fbd5-d09c-41a4-ae95-92f0698fce09 req-2a13bbb9-83e2-4110-97c8-2bf90cb0069e service nova] Releasing lock "refresh_cache-ef129347-9ea0-4615-b897-f51e664da1a7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.307636] env[61998]: DEBUG nova.scheduler.client.report [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 586.311633] env[61998]: DEBUG nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 586.509455] env[61998]: INFO nova.compute.manager [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] [instance: 0fd8d146-79fe-4e2a-90a3-67d457fc570f] Took 1.03 seconds to deallocate network for instance. 
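The repeated "Inventory has not changed ..." entries pair with the inventory dict logged above: the scheduler report client recomputes the provider's inventory, compares it against the copy cached in ProviderTree, and only sends an update to placement when they differ. Illustratively, with the exact inventory from the log (plain dict comparison; that the real client does no more than field normalization before comparing is an assumption here):

```python
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def needs_update(cached, new):
    return cached != new          # per-resource-class dict equality

assert not needs_update(inventory, dict(inventory))   # no PUT to placement
```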
[ 586.817238] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.819182] env[61998]: DEBUG nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 586.831061] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.781s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.839027] env[61998]: INFO nova.compute.claims [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.876233] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.950770] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 586.951097] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8ed52df-653c-4f38-b0b0-a3b51e16ac4a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.959018] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 586.959018] env[61998]: value = "task-1388398" [ 586.959018] env[61998]: _type = "Task" [ 586.959018] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.970031] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388398, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.332905] env[61998]: DEBUG nova.compute.utils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 587.334902] env[61998]: DEBUG nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 587.339117] env[61998]: DEBUG nova.network.neutron [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 587.432232] env[61998]: DEBUG nova.policy [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'edc90c24b0b540bd977449377d9477b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b328ec6d95d140d3bb99bff5bddb58c6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 587.471210] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388398, 'name': PowerOffVM_Task, 'duration_secs': 0.15643} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.472040] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 587.472040] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 587.472590] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d13aa92-38bb-440e-ae6f-0573b99ebe1b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.483345] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 587.483513] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1679cb95-6b6e-4cec-b328-2cc540a7ad5b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.506752] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 587.507045] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 587.507245] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Deleting the datastore file [datastore1] e4ada227-b79a-457a-b063-dde99840aa14 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 587.507510] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-797cbbc2-2553-49b0-a4ea-f02edd7e3a57 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.514836] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 587.514836] env[61998]: value = "task-1388401" [ 587.514836] env[61998]: _type = "Task" [ 587.514836] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.527705] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388401, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.552027] env[61998]: INFO nova.scheduler.client.report [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Deleted allocations for instance 0fd8d146-79fe-4e2a-90a3-67d457fc570f [ 587.818176] env[61998]: DEBUG nova.network.neutron [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Successfully created port: cebc5609-a26d-4a2c-9232-e2a23c02b6be {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 587.842455] env[61998]: DEBUG nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 588.032224] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388401, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134283} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.032224] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 588.032224] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 588.032224] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 588.068058] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2f9d4272-6f46-49aa-b042-4fd689e4d836 tempest-ServersWithSpecificFlavorTestJSON-596499299 tempest-ServersWithSpecificFlavorTestJSON-596499299-project-member] Lock "0fd8d146-79fe-4e2a-90a3-67d457fc570f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.731s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.287920] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a51f4ef-80e5-437e-9106-b834a607399c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.295700] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ff3d1a-412f-4e73-b262-14dd6e89b607 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.331428] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a806f430-e214-4435-8595-2b43e8e53dec {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.339111] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf85a953-bf7e-4026-bf73-3cedcbf3fdf3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.355908] env[61998]: DEBUG nova.compute.provider_tree [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.572938] env[61998]: DEBUG nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Starting instance... 
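
The "Acquiring lock" / "acquired ... waited N.NNNs" / ""released" ... held N.NNNs" triples throughout these entries (e.g. the build lock released above after 52.731s) come from oslo.concurrency's lockutils wrappers. A minimal sketch of the decorator form used for the compute_resources semaphore; the function body is a stand-in:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # While the body runs, the named semaphore is held; on entry and exit
        # the wrapper emits the 'acquired ... waited N.NNNs' and
        # 'released ... held N.NNNs' DEBUG lines seen in this log.
        pass
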
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 588.807118] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Acquiring lock "9025d114-10da-4cf8-9e5f-2520bfd3b246" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.807624] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Lock "9025d114-10da-4cf8-9e5f-2520bfd3b246" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.858504] env[61998]: DEBUG nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 588.862665] env[61998]: DEBUG nova.scheduler.client.report [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 588.900415] env[61998]: DEBUG nova.virt.hardware [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 588.900688] env[61998]: DEBUG nova.virt.hardware [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 588.900858] env[61998]: DEBUG nova.virt.hardware [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 588.901053] env[61998]: DEBUG nova.virt.hardware [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 588.901198] env[61998]: DEBUG nova.virt.hardware [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 588.901341] env[61998]: DEBUG nova.virt.hardware [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 588.901542] env[61998]: DEBUG nova.virt.hardware [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 588.901698] env[61998]: DEBUG nova.virt.hardware [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 588.901861] env[61998]: DEBUG nova.virt.hardware [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 588.903515] env[61998]: DEBUG nova.virt.hardware [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 588.903797] env[61998]: DEBUG nova.virt.hardware [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 588.905357] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b4195b-097b-4353-ab90-ca337e447293 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.917128] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0379a1a7-69f5-40ea-98a2-3c7b7d84cae3 
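
The hardware.py block above reduces to a simple enumeration: with no flavor or image limits (0:0:0, capped at 65536 per dimension), the only topology whose sockets * cores * threads equals 1 vCPU is 1:1:1. An illustrative reimplementation of that enumeration, not Nova's actual code:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product is vcpus,
        # mirroring the 'Build topologies for 1 vcpu(s) 1:1:1' step above.
        topologies = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus // s, max_cores) + 1):
                for t in range(1, min(vcpus // (s * c), max_threads) + 1):
                    if s * c * t == vcpus:
                        topologies.append((s, c, t))
        return topologies

    print(possible_topologies(1))  # [(1, 1, 1)] -- 'Got 1 possible topologies'
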
{{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.071479] env[61998]: DEBUG nova.virt.hardware [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 589.071827] env[61998]: DEBUG nova.virt.hardware [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 589.073806] env[61998]: DEBUG nova.virt.hardware [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 589.073806] env[61998]: DEBUG nova.virt.hardware [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 589.073806] env[61998]: DEBUG nova.virt.hardware [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 589.074978] env[61998]: DEBUG nova.virt.hardware [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 589.074978] env[61998]: DEBUG nova.virt.hardware [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 589.074978] env[61998]: DEBUG nova.virt.hardware [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 589.074978] env[61998]: DEBUG nova.virt.hardware [None 
req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 589.074978] env[61998]: DEBUG nova.virt.hardware [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 589.075259] env[61998]: DEBUG nova.virt.hardware [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 589.076544] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5782239-6b1b-45a3-bff2-77cb5955e0cb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.087769] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a317693c-ab34-4ffe-86e4-70781b21f551 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.108205] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Instance VIF info [] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 589.113272] env[61998]: DEBUG oslo.service.loopingcall [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 589.114393] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.114602] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 589.114846] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70b536e2-452f-4e2e-9a36-9b1171368f5c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.137612] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 589.137612] env[61998]: value = "task-1388402" [ 589.137612] env[61998]: _type = "Task" [ 589.137612] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.149373] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388402, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.367900] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.537s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.368547] env[61998]: DEBUG nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 589.374988] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.311s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.376573] env[61998]: INFO nova.compute.claims [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.652178] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388402, 'name': CreateVM_Task, 'duration_secs': 0.2657} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.652178] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 589.653440] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.654158] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.657029] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 589.657029] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca2de8dd-3f22-4737-8504-32bb49732747 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
589.660700] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 589.660700] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5244502f-4dce-f38d-c3eb-42e9f7d31d33" [ 589.660700] env[61998]: _type = "Task" [ 589.660700] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.671035] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5244502f-4dce-f38d-c3eb-42e9f7d31d33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.738147] env[61998]: DEBUG nova.compute.manager [req-da5b0e95-2f23-4251-bded-7309aa5b05f2 req-5341afd0-a378-4589-b3a3-7ce26534619a service nova] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Received event network-changed-cebc5609-a26d-4a2c-9232-e2a23c02b6be {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 589.738147] env[61998]: DEBUG nova.compute.manager [req-da5b0e95-2f23-4251-bded-7309aa5b05f2 req-5341afd0-a378-4589-b3a3-7ce26534619a service nova] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Refreshing instance network info cache due to event network-changed-cebc5609-a26d-4a2c-9232-e2a23c02b6be. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 589.738383] env[61998]: DEBUG oslo_concurrency.lockutils [req-da5b0e95-2f23-4251-bded-7309aa5b05f2 req-5341afd0-a378-4589-b3a3-7ce26534619a service nova] Acquiring lock "refresh_cache-59330fd4-c362-4593-824d-d40c00f3f5d2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.738383] env[61998]: DEBUG oslo_concurrency.lockutils [req-da5b0e95-2f23-4251-bded-7309aa5b05f2 req-5341afd0-a378-4589-b3a3-7ce26534619a service nova] Acquired lock "refresh_cache-59330fd4-c362-4593-824d-d40c00f3f5d2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.738524] env[61998]: DEBUG nova.network.neutron [req-da5b0e95-2f23-4251-bded-7309aa5b05f2 req-5341afd0-a378-4589-b3a3-7ce26534619a service nova] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Refreshing network info cache for port cebc5609-a26d-4a2c-9232-e2a23c02b6be {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 589.894550] env[61998]: DEBUG nova.compute.utils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 589.894550] env[61998]: DEBUG nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Allocating IP information in the background. 
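
"Allocating IP information in the background" means the Neutron calls run in a separate greenthread while block-device setup proceeds; the spawn path only blocks when it first iterates the network_info wrapper (the _sync_wrapper/wait frames visible in the traceback further down). A reduced sketch of that pattern using eventlet directly, rather than Nova's wrapper classes:

    import eventlet

    def allocate_for_instance():
        # Stand-in for the port create/update calls logged above; any
        # exception raised here is deferred until wait() is called.
        return ['network-info']

    greenthread = eventlet.spawn(allocate_for_instance)
    # ... block device mappings are built concurrently ...
    network_info = greenthread.wait()  # re-raises e.g. PortBindingFailed
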
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 589.894550] env[61998]: DEBUG nova.network.neutron [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 590.016596] env[61998]: DEBUG nova.policy [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '45e5c7148ac343ee8674cf6747d7df0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '102883704d52434591e74440e02262fb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 590.104354] env[61998]: ERROR nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cebc5609-a26d-4a2c-9232-e2a23c02b6be, please check neutron logs for more information. [ 590.104354] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 590.104354] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 590.104354] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 590.104354] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.104354] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 590.104354] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.104354] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 590.104354] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.104354] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 590.104354] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.104354] env[61998]: ERROR nova.compute.manager raise self.value [ 590.104354] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.104354] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 590.104354] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.104354] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 590.104733] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 590.104733] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 590.104733] env[61998]: ERROR 
nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cebc5609-a26d-4a2c-9232-e2a23c02b6be, please check neutron logs for more information. [ 590.104733] env[61998]: ERROR nova.compute.manager [ 590.104733] env[61998]: Traceback (most recent call last): [ 590.104733] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 590.104733] env[61998]: listener.cb(fileno) [ 590.104733] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 590.104733] env[61998]: result = function(*args, **kwargs) [ 590.104733] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 590.104733] env[61998]: return func(*args, **kwargs) [ 590.104733] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 590.104733] env[61998]: raise e [ 590.104733] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 590.104733] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 590.104733] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.104733] env[61998]: created_port_ids = self._update_ports_for_instance( [ 590.104733] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.104733] env[61998]: with excutils.save_and_reraise_exception(): [ 590.104733] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.104733] env[61998]: self.force_reraise() [ 590.104733] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.104733] env[61998]: raise self.value [ 590.104733] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.104733] env[61998]: updated_port = self._update_port( [ 590.104733] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.104733] env[61998]: _ensure_no_port_binding_failure(port) [ 590.104733] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 590.104733] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 590.105393] env[61998]: nova.exception.PortBindingFailed: Binding failed for port cebc5609-a26d-4a2c-9232-e2a23c02b6be, please check neutron logs for more information. [ 590.105393] env[61998]: Removing descriptor: 15 [ 590.105444] env[61998]: ERROR nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cebc5609-a26d-4a2c-9232-e2a23c02b6be, please check neutron logs for more information. 
[ 590.105444] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Traceback (most recent call last): [ 590.105444] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 590.105444] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] yield resources [ 590.105444] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 590.105444] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] self.driver.spawn(context, instance, image_meta, [ 590.105444] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 590.105444] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 590.105444] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 590.105444] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] vm_ref = self.build_virtual_machine(instance, [ 590.105444] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 590.105695] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] vif_infos = vmwarevif.get_vif_info(self._session, [ 590.105695] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 590.105695] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] for vif in network_info: [ 590.105695] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 590.105695] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] return self._sync_wrapper(fn, *args, **kwargs) [ 590.105695] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 590.105695] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] self.wait() [ 590.105695] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 590.105695] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] self[:] = self._gt.wait() [ 590.105695] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 590.105695] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] return self._exit_event.wait() [ 590.105695] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 590.105695] env[61998]: ERROR 
nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] result = hub.switch() [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] return self.greenlet.switch() [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] result = function(*args, **kwargs) [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] return func(*args, **kwargs) [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] raise e [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] nwinfo = self.network_api.allocate_for_instance( [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] created_port_ids = self._update_ports_for_instance( [ 590.105956] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.106190] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] with excutils.save_and_reraise_exception(): [ 590.106190] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.106190] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] self.force_reraise() [ 590.106190] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.106190] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] raise self.value [ 590.106190] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.106190] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] updated_port = self._update_port( [ 590.106190] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.106190] 
env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] _ensure_no_port_binding_failure(port) [ 590.106190] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 590.106190] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] raise exception.PortBindingFailed(port_id=port['id']) [ 590.106190] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] nova.exception.PortBindingFailed: Binding failed for port cebc5609-a26d-4a2c-9232-e2a23c02b6be, please check neutron logs for more information. [ 590.106190] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] [ 590.106424] env[61998]: INFO nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Terminating instance [ 590.109079] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Acquiring lock "refresh_cache-59330fd4-c362-4593-824d-d40c00f3f5d2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.174751] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5244502f-4dce-f38d-c3eb-42e9f7d31d33, 'name': SearchDatastore_Task, 'duration_secs': 0.008546} completed successfully. 
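
The repeated traceback above bottoms out in a short check: Neutron marks a failed binding on the port itself, and Nova translates that into PortBindingFailed. A reduced sketch of the check at the innermost frame (nova/network/neutron.py line 294 in the traceback):

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f'Binding failed for port {port_id}, please '
                             'check neutron logs for more information.')

    def _ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding via the port's binding:vif_type
        # attribute; Nova raises so the build can be aborted and retried.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])
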
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.175073] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.175302] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 590.175526] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.175660] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.175829] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 590.176094] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-658f569c-0b7b-4111-8537-d7a2f28419b0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.187040] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 590.187223] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 590.188498] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28182116-fe23-4b49-8010-38ca29178da1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.194691] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 590.194691] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5268bdc4-c1a8-f985-ba76-3aeac3a45093" [ 590.194691] env[61998]: _type = "Task" [ 590.194691] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.203239] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5268bdc4-c1a8-f985-ba76-3aeac3a45093, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.262323] env[61998]: DEBUG nova.network.neutron [req-da5b0e95-2f23-4251-bded-7309aa5b05f2 req-5341afd0-a378-4589-b3a3-7ce26534619a service nova] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 590.355083] env[61998]: DEBUG nova.network.neutron [req-da5b0e95-2f23-4251-bded-7309aa5b05f2 req-5341afd0-a378-4589-b3a3-7ce26534619a service nova] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.394219] env[61998]: DEBUG nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 590.467329] env[61998]: DEBUG nova.network.neutron [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Successfully created port: 2924fd97-aeb1-44e8-9977-63613685db15 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 590.711174] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5268bdc4-c1a8-f985-ba76-3aeac3a45093, 'name': SearchDatastore_Task, 'duration_secs': 0.008337} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.712065] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b097f96-01bb-41b5-84df-fb02da684b06 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.722388] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 590.722388] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52bd162c-7e09-63c2-a92f-722aee4dd24a" [ 590.722388] env[61998]: _type = "Task" [ 590.722388] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.732119] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52bd162c-7e09-63c2-a92f-722aee4dd24a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.859165] env[61998]: DEBUG oslo_concurrency.lockutils [req-da5b0e95-2f23-4251-bded-7309aa5b05f2 req-5341afd0-a378-4589-b3a3-7ce26534619a service nova] Releasing lock "refresh_cache-59330fd4-c362-4593-824d-d40c00f3f5d2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.859165] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Acquired lock "refresh_cache-59330fd4-c362-4593-824d-d40c00f3f5d2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.859165] env[61998]: DEBUG nova.network.neutron [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 590.893106] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cefdb28-bc34-4808-919c-596dce9907f5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.905385] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a9ca35-9cd0-4265-9cab-4ff21f5cabb8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.941809] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beaeec4d-c8ee-4d3c-88e1-8901b53b2e11 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.950652] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab92a63-946e-4800-8640-fae5bb2e543a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.966075] env[61998]: DEBUG 
nova.compute.provider_tree [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.234607] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52bd162c-7e09-63c2-a92f-722aee4dd24a, 'name': SearchDatastore_Task, 'duration_secs': 0.00904} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.234607] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.234607] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] e4ada227-b79a-457a-b063-dde99840aa14/e4ada227-b79a-457a-b063-dde99840aa14.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 591.234607] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8282fbdf-30d0-43a1-ae40-b439b8b65874 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.244439] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 591.244439] env[61998]: value = "task-1388404" [ 591.244439] env[61998]: _type = "Task" [ 591.244439] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.254684] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388404, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.390956] env[61998]: DEBUG nova.network.neutron [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 591.413491] env[61998]: DEBUG nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Start spawning the instance on the hypervisor. 
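
For scale, the inventory reported above for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd (the dict logged at 588.862665) implies the following usable capacity; Placement treats (total - reserved) * allocation_ratio as the schedulable amount per resource class:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
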
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 591.455306] env[61998]: DEBUG nova.virt.hardware [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 591.455306] env[61998]: DEBUG nova.virt.hardware [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 591.455492] env[61998]: DEBUG nova.virt.hardware [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 591.455613] env[61998]: DEBUG nova.virt.hardware [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 591.455833] env[61998]: DEBUG nova.virt.hardware [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 591.455833] env[61998]: DEBUG nova.virt.hardware [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 591.456584] env[61998]: DEBUG nova.virt.hardware [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 591.456825] env[61998]: DEBUG nova.virt.hardware [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 591.457042] env[61998]: DEBUG nova.virt.hardware [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] 
Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 591.460138] env[61998]: DEBUG nova.virt.hardware [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 591.460138] env[61998]: DEBUG nova.virt.hardware [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 591.460138] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2296d14e-1bca-423d-a6d1-1a67cafe8bb1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.471038] env[61998]: DEBUG nova.scheduler.client.report [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 591.476766] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7dacfb3-3c42-4fed-ab88-d54c29fa3333 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.531529] env[61998]: DEBUG nova.network.neutron [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.756705] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388404, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492447} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.756705] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] e4ada227-b79a-457a-b063-dde99840aa14/e4ada227-b79a-457a-b063-dde99840aa14.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 591.756890] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 591.757290] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c9e2637a-515f-4b1e-998e-d9036ae9ed53 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.768874] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 591.768874] env[61998]: value = "task-1388406" [ 591.768874] env[61998]: _type = "Task" [ 591.768874] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.783101] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388406, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.867432] env[61998]: DEBUG nova.compute.manager [req-ceb5f9d7-db07-417d-adb9-2df18a776482 req-3d2c657d-340a-4194-8b37-e93ccbe680f5 service nova] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Received event network-vif-deleted-cebc5609-a26d-4a2c-9232-e2a23c02b6be {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 591.984227] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.609s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.984756] env[61998]: DEBUG nova.compute.manager [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 591.987271] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.714s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.988649] env[61998]: INFO nova.compute.claims [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 592.034317] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Releasing lock "refresh_cache-59330fd4-c362-4593-824d-d40c00f3f5d2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.034671] env[61998]: DEBUG nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 592.038017] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 592.038017] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f9906ab-889e-408a-a696-24115eb242c2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.047710] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2549cec-06d3-49fc-9d3e-7857233b85a0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.075823] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 59330fd4-c362-4593-824d-d40c00f3f5d2 could not be found. [ 592.076051] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 592.076238] env[61998]: INFO nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 592.076589] env[61998]: DEBUG oslo.service.loopingcall [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 592.076845] env[61998]: DEBUG nova.compute.manager [-] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 592.076969] env[61998]: DEBUG nova.network.neutron [-] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 592.100528] env[61998]: DEBUG nova.compute.manager [req-0a03c9b5-d5a2-4bcc-8a7c-bf70dc010376 req-958180cb-c3c1-4e5e-8599-277e259c0f6e service nova] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Received event network-changed-2924fd97-aeb1-44e8-9977-63613685db15 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 592.100643] env[61998]: DEBUG nova.compute.manager [req-0a03c9b5-d5a2-4bcc-8a7c-bf70dc010376 req-958180cb-c3c1-4e5e-8599-277e259c0f6e service nova] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Refreshing instance network info cache due to event network-changed-2924fd97-aeb1-44e8-9977-63613685db15. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 592.100876] env[61998]: DEBUG oslo_concurrency.lockutils [req-0a03c9b5-d5a2-4bcc-8a7c-bf70dc010376 req-958180cb-c3c1-4e5e-8599-277e259c0f6e service nova] Acquiring lock "refresh_cache-df154c2a-3616-442d-abb0-83e68cf1141d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.100982] env[61998]: DEBUG oslo_concurrency.lockutils [req-0a03c9b5-d5a2-4bcc-8a7c-bf70dc010376 req-958180cb-c3c1-4e5e-8599-277e259c0f6e service nova] Acquired lock "refresh_cache-df154c2a-3616-442d-abb0-83e68cf1141d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.101149] env[61998]: DEBUG nova.network.neutron [req-0a03c9b5-d5a2-4bcc-8a7c-bf70dc010376 req-958180cb-c3c1-4e5e-8599-277e259c0f6e service nova] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Refreshing network info cache for port 2924fd97-aeb1-44e8-9977-63613685db15 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 592.106230] env[61998]: DEBUG nova.network.neutron [-] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 592.125882] env[61998]: ERROR nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2924fd97-aeb1-44e8-9977-63613685db15, please check neutron logs for more information. 
[ 592.125882] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 592.125882] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 592.125882] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 592.125882] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 592.125882] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 592.125882] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 592.125882] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 592.125882] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.125882] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 592.125882] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.125882] env[61998]: ERROR nova.compute.manager raise self.value [ 592.125882] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 592.125882] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 592.125882] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.125882] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 592.126420] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.126420] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 592.126420] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2924fd97-aeb1-44e8-9977-63613685db15, please check neutron logs for more information. 
[ 592.126420] env[61998]: ERROR nova.compute.manager [ 592.126420] env[61998]: Traceback (most recent call last): [ 592.126420] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 592.126420] env[61998]: listener.cb(fileno) [ 592.126420] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.126420] env[61998]: result = function(*args, **kwargs) [ 592.126420] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 592.126420] env[61998]: return func(*args, **kwargs) [ 592.126420] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 592.126420] env[61998]: raise e [ 592.126420] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 592.126420] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 592.126420] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 592.126420] env[61998]: created_port_ids = self._update_ports_for_instance( [ 592.126420] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 592.126420] env[61998]: with excutils.save_and_reraise_exception(): [ 592.126420] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.126420] env[61998]: self.force_reraise() [ 592.126420] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.126420] env[61998]: raise self.value [ 592.126420] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 592.126420] env[61998]: updated_port = self._update_port( [ 592.126420] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.126420] env[61998]: _ensure_no_port_binding_failure(port) [ 592.126420] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.126420] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 592.127037] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 2924fd97-aeb1-44e8-9977-63613685db15, please check neutron logs for more information. [ 592.127037] env[61998]: Removing descriptor: 17 [ 592.127037] env[61998]: ERROR nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2924fd97-aeb1-44e8-9977-63613685db15, please check neutron logs for more information. 
[ 592.127037] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Traceback (most recent call last): [ 592.127037] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 592.127037] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] yield resources [ 592.127037] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 592.127037] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] self.driver.spawn(context, instance, image_meta, [ 592.127037] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 592.127037] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 592.127037] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 592.127037] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] vm_ref = self.build_virtual_machine(instance, [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] vif_infos = vmwarevif.get_vif_info(self._session, [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] for vif in network_info: [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] return self._sync_wrapper(fn, *args, **kwargs) [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] self.wait() [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] self[:] = self._gt.wait() [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] return self._exit_event.wait() [ 592.127283] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 592.127536] env[61998]: ERROR 
nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] result = hub.switch() [ 592.127536] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 592.127536] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] return self.greenlet.switch() [ 592.127536] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.127536] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] result = function(*args, **kwargs) [ 592.127536] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 592.127536] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] return func(*args, **kwargs) [ 592.127536] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 592.127536] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] raise e [ 592.127536] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 592.127536] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] nwinfo = self.network_api.allocate_for_instance( [ 592.127536] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 592.127536] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] created_port_ids = self._update_ports_for_instance( [ 592.127787] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 592.127787] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] with excutils.save_and_reraise_exception(): [ 592.127787] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.127787] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] self.force_reraise() [ 592.127787] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.127787] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] raise self.value [ 592.127787] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 592.127787] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] updated_port = self._update_port( [ 592.127787] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.127787] 
env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] _ensure_no_port_binding_failure(port) [ 592.127787] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.127787] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] raise exception.PortBindingFailed(port_id=port['id']) [ 592.128142] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] nova.exception.PortBindingFailed: Binding failed for port 2924fd97-aeb1-44e8-9977-63613685db15, please check neutron logs for more information. [ 592.128142] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] [ 592.128142] env[61998]: INFO nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Terminating instance [ 592.129254] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "refresh_cache-df154c2a-3616-442d-abb0-83e68cf1141d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.279991] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388406, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060693} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.280270] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 592.281061] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c519ff4d-c8dd-4707-95bb-7e0a6c8d4591 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.303184] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] e4ada227-b79a-457a-b063-dde99840aa14/e4ada227-b79a-457a-b063-dde99840aa14.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 592.303487] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f199a3f-4116-41eb-8514-7f862b0e6f38 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.326424] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 592.326424] env[61998]: value = "task-1388407" [ 592.326424] 
env[61998]: _type = "Task" [ 592.326424] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.334997] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388407, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.493441] env[61998]: DEBUG nova.compute.utils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 592.498039] env[61998]: DEBUG nova.compute.manager [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Not allocating networking since 'none' was specified. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 592.609160] env[61998]: DEBUG nova.network.neutron [-] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.645334] env[61998]: DEBUG nova.network.neutron [req-0a03c9b5-d5a2-4bcc-8a7c-bf70dc010376 req-958180cb-c3c1-4e5e-8599-277e259c0f6e service nova] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 592.793557] env[61998]: DEBUG nova.network.neutron [req-0a03c9b5-d5a2-4bcc-8a7c-bf70dc010376 req-958180cb-c3c1-4e5e-8599-277e259c0f6e service nova] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.838045] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388407, 'name': ReconfigVM_Task, 'duration_secs': 0.487889} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.838045] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Reconfigured VM instance instance-0000000d to attach disk [datastore2] e4ada227-b79a-457a-b063-dde99840aa14/e4ada227-b79a-457a-b063-dde99840aa14.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 592.838210] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08fcdfb5-1118-4280-b430-e9515d9e7118 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.847640] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 592.847640] env[61998]: value = "task-1388408" [ 592.847640] env[61998]: _type = "Task" [ 592.847640] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.859609] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388408, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.990845] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Acquiring lock "62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.991095] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Lock "62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.003051] env[61998]: DEBUG nova.compute.manager [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 593.113727] env[61998]: INFO nova.compute.manager [-] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Took 1.04 seconds to deallocate network for instance. 
[ 593.116951] env[61998]: DEBUG nova.compute.claims [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 593.117200] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.302173] env[61998]: DEBUG oslo_concurrency.lockutils [req-0a03c9b5-d5a2-4bcc-8a7c-bf70dc010376 req-958180cb-c3c1-4e5e-8599-277e259c0f6e service nova] Releasing lock "refresh_cache-df154c2a-3616-442d-abb0-83e68cf1141d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.302173] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquired lock "refresh_cache-df154c2a-3616-442d-abb0-83e68cf1141d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.302173] env[61998]: DEBUG nova.network.neutron [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 593.358831] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388408, 'name': Rename_Task, 'duration_secs': 0.212878} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.359255] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 593.359598] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb8eb329-cad3-43df-99ff-e723db4250dc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.367890] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 593.367890] env[61998]: value = "task-1388409" [ 593.367890] env[61998]: _type = "Task" [ 593.367890] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.378933] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388409, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.466919] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c329212-0bdc-4639-b3ef-46daefab45ad {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.475548] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7744483f-8a90-494b-8ea6-e314be2432a6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.511971] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c5e42b-7005-4d27-bfb4-4e876d856606 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.520597] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babe8907-5663-470b-a539-0727b6aca9e1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.536570] env[61998]: DEBUG nova.compute.provider_tree [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.833920] env[61998]: DEBUG nova.network.neutron [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 593.892018] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388409, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.995060] env[61998]: DEBUG nova.network.neutron [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.016887] env[61998]: DEBUG nova.compute.manager [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 594.041292] env[61998]: DEBUG nova.scheduler.client.report [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 594.057533] env[61998]: DEBUG nova.virt.hardware [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 594.057775] env[61998]: DEBUG nova.virt.hardware [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 594.057928] env[61998]: DEBUG nova.virt.hardware [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 594.058120] env[61998]: DEBUG nova.virt.hardware [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 594.058268] env[61998]: DEBUG nova.virt.hardware [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 594.058411] env[61998]: DEBUG nova.virt.hardware [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 594.058615] env[61998]: DEBUG nova.virt.hardware [None 
req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 594.058770] env[61998]: DEBUG nova.virt.hardware [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 594.058934] env[61998]: DEBUG nova.virt.hardware [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 594.060865] env[61998]: DEBUG nova.virt.hardware [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 594.060865] env[61998]: DEBUG nova.virt.hardware [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 594.060865] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e07808-85a4-4fee-938d-e2df68df8914 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.071284] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03584769-b4f6-4cce-8112-d6c780692e91 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.086927] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Instance VIF info [] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 594.092680] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Creating folder: Project (5d0e752a9ff64f2bb8e138e76c5c258d). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 594.092990] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f967ac45-0d6f-450c-bf6f-781f67786df3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.106221] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Created folder: Project (5d0e752a9ff64f2bb8e138e76c5c258d) in parent group-v294665. 
[ 594.106800] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Creating folder: Instances. Parent ref: group-v294681. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 594.106800] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23dd448a-699d-40d5-979d-45db8b2804e8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.117820] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Created folder: Instances in parent group-v294681. [ 594.118493] env[61998]: DEBUG oslo.service.loopingcall [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.118493] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 594.118888] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0517308e-9ca7-4c3f-b022-f01fd79e068c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.140541] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 594.140541] env[61998]: value = "task-1388412" [ 594.140541] env[61998]: _type = "Task" [ 594.140541] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.148813] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388412, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.297469] env[61998]: DEBUG nova.compute.manager [req-ab7c3a05-d28c-4871-a9ec-a5b5c5cffa87 req-f5cf7e5e-4666-441c-a328-9b55431f3f8a service nova] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Received event network-vif-deleted-2924fd97-aeb1-44e8-9977-63613685db15 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 594.387613] env[61998]: DEBUG oslo_vmware.api [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388409, 'name': PowerOnVM_Task, 'duration_secs': 0.685321} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.387884] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 594.388094] env[61998]: DEBUG nova.compute.manager [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 594.389877] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788dcd7b-1973-4a3f-b9c2-ee5bb1afa8f5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.500163] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Releasing lock "refresh_cache-df154c2a-3616-442d-abb0-83e68cf1141d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.500163] env[61998]: DEBUG nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 594.500163] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 594.500163] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c071b61e-24dc-48f3-bf9a-e7bbd117bee0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.514055] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85577df4-ea8a-4649-8f9e-d5e1a8804094 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.542360] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance df154c2a-3616-442d-abb0-83e68cf1141d could not be found. 
[ 594.542614] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 594.542815] env[61998]: INFO nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 594.543565] env[61998]: DEBUG oslo.service.loopingcall [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.543977] env[61998]: DEBUG nova.compute.manager [-] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 594.543977] env[61998]: DEBUG nova.network.neutron [-] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 594.548734] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.561s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.549255] env[61998]: DEBUG nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 594.552608] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.207s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.554277] env[61998]: INFO nova.compute.claims [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 594.568128] env[61998]: DEBUG nova.network.neutron [-] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.651940] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388412, 'name': CreateVM_Task, 'duration_secs': 0.499888} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.653868] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 594.654399] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.654564] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.654893] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 594.655186] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-963e709a-2ac1-4420-b602-fa1da4cf86a2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.661036] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 594.661036] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]529dfc7d-d67e-4ce5-d35f-f86417b2a8ce" [ 594.661036] env[61998]: _type = "Task" [ 594.661036] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.670274] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529dfc7d-d67e-4ce5-d35f-f86417b2a8ce, 'name': SearchDatastore_Task} progress is 0%. 
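The `Waiting for the task: (returnval){...}` block and the `progress is 0%` / `completed successfully` pair around it come from the oslo_vmware.api task poller, which re-reads the task's state on an interval until it reaches a terminal state. A self-contained sketch of that loop, with a fake task standing in for a vSphere TaskInfo read through the session:

```python
import itertools
import time

class FakeTask:
    """Stand-in for a vSphere task: queued, running, then success."""
    def __init__(self):
        self._states = itertools.chain(["queued", "running", "running"],
                                       itertools.repeat("success"))
    def read_state(self):
        return next(self._states)

def wait_for_task(task, poll_interval=0.01):
    while True:
        state = task.read_state()
        if state == "success":
            return                      # terminal: task done
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(poll_interval)       # the real poller logs progress here

wait_for_task(FakeTask())
print("completed successfully")
```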
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.912419] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.063177] env[61998]: DEBUG nova.compute.utils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 595.067690] env[61998]: DEBUG nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 595.067690] env[61998]: DEBUG nova.network.neutron [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 595.069436] env[61998]: DEBUG nova.network.neutron [-] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.139766] env[61998]: DEBUG nova.policy [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc49824ed098470a8356c397c081d39f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9beef1e3808f408aa446636ed98674bc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 595.185620] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Acquiring lock "5f205b7d-d93e-436d-9d7d-04c6f767f7ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.185824] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Lock "5f205b7d-d93e-436d-9d7d-04c6f767f7ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.186051] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529dfc7d-d67e-4ce5-d35f-f86417b2a8ce, 'name': SearchDatastore_Task, 'duration_secs': 0.022988} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.186510] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.186590] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 595.186764] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.186901] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.187658] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 595.187902] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54821c97-4eea-4460-8bc3-cee3f6a0acf6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.199952] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 595.200211] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Folder [datastore2] devstack-image-cache_base created. 
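The `mkdir` / `Folder [datastore2] devstack-image-cache_base created` pair above is a create-if-missing call: the directory is created unconditionally and an already-exists answer from the datastore is treated as success. The local-filesystem analogue of that idempotent step, as a sketch:

```python
import os
import tempfile

def create_folder_if_missing(path):
    try:
        os.makedirs(path)
        print(f"Folder {path} created.")
    except FileExistsError:
        # Treated as success, just like the datastore MakeDirectory call.
        print(f"Folder {path} already exists.")

base = os.path.join(tempfile.gettempdir(), "devstack-image-cache_base")
create_folder_if_missing(base)
create_folder_if_missing(base)  # second call is a harmless no-op
```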
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 595.201046] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dae4e321-71c9-4ca5-a35e-7ac79ff01f77 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.212108] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 595.212108] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5228f2d6-ee17-9e75-40da-b1978851b7fb" [ 595.212108] env[61998]: _type = "Task" [ 595.212108] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.222805] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5228f2d6-ee17-9e75-40da-b1978851b7fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.534731] env[61998]: DEBUG nova.network.neutron [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Successfully created port: c969908d-2baa-444f-ad85-d6e514854266 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 595.568077] env[61998]: DEBUG nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 595.573709] env[61998]: INFO nova.compute.manager [-] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Took 1.03 seconds to deallocate network for instance. [ 595.580525] env[61998]: DEBUG nova.compute.claims [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 595.580711] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.731584] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5228f2d6-ee17-9e75-40da-b1978851b7fb, 'name': SearchDatastore_Task, 'duration_secs': 0.011658} completed successfully. 
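The locking around `[datastore2] devstack-image-cache_base/a90c4a31-…` in the preceding entries (take the image's cache lock, run SearchDatastore_Task, then hold the `.vmdk` path while deciding whether the template must be fetched) is a per-key check-then-act cache: concurrent spawns of the same image do the download once while the rest wait and then see the cached copy. A threading sketch of the pattern; the names here are illustrative:

```python
import threading
from collections import defaultdict

_cache_locks = defaultdict(threading.Lock)   # one lock per cached image id
_image_cache = {}                            # image_id -> cached template path

def fetch_image_if_missing(image_id, fetch):
    with _cache_locks[image_id]:             # serialize work on this image only
        if image_id not in _image_cache:     # re-check under the lock
            _image_cache[image_id] = fetch(image_id)
    return _image_cache[image_id]

path = fetch_image_if_missing(
    "a90c4a31-8bcc-48cf-ada7-7369ab14c460",
    lambda image_id: f"[datastore2] devstack-image-cache_base/{image_id}/{image_id}.vmdk",
)
print(path)
```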
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.732623] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8989b282-e074-450a-ac6f-292e867c7dd7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.742289] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 595.742289] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]522407a3-6158-f485-8d2d-18eb5e204ee9" [ 595.742289] env[61998]: _type = "Task" [ 595.742289] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.756201] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]522407a3-6158-f485-8d2d-18eb5e204ee9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.159219] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011897b8-8e4f-4693-99e8-ebc605b770bd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.171509] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e478a89-103b-47d9-96e4-5d13f13d13c2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.214761] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5e55c7-78ae-463d-b592-a708a44ecac4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.226145] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f93730-e3dc-4fef-8e48-c1a4b8ab9a09 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.242630] env[61998]: DEBUG nova.compute.provider_tree [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.256765] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]522407a3-6158-f485-8d2d-18eb5e204ee9, 'name': SearchDatastore_Task, 'duration_secs': 0.016923} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.258149] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.258577] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] a8f6254f-b867-4967-b4fa-bb70f471f89d/a8f6254f-b867-4967-b4fa-bb70f471f89d.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 596.258955] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e296f13-24c8-4b12-a8ca-440632393167 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.269706] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 596.269706] env[61998]: value = "task-1388413" [ 596.269706] env[61998]: _type = "Task" [ 596.269706] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.282155] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388413, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.358746] env[61998]: INFO nova.compute.manager [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Rebuilding instance [ 596.417612] env[61998]: DEBUG nova.compute.manager [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 596.418806] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3cc7bde-cf18-4079-bd6a-27fc8f735e2b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.582097] env[61998]: DEBUG nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Start spawning the instance on the hypervisor. 
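On a cache hit, the template disk is copied from the cache to the instance's own `<uuid>/<uuid>.vmdk` path, as the CopyVirtualDisk_Task above records, and only afterwards resized to the flavor. A local sketch of the naming-and-copy step, with plain files standing in for the datastore API:

```python
import os
import shutil
import tempfile

def copy_cached_disk(cache_path, instance_uuid, root):
    """Copy the cached template to the instance's own '<uuid>/<uuid>.vmdk'."""
    dest_dir = os.path.join(root, instance_uuid)
    os.makedirs(dest_dir, exist_ok=True)
    dest = os.path.join(dest_dir, f"{instance_uuid}.vmdk")
    shutil.copyfile(cache_path, dest)
    return dest

root = tempfile.mkdtemp()
cache = os.path.join(root, "cached.vmdk")
with open(cache, "wb") as f:
    f.write(b"\x00" * 16)  # pretend template disk
print(copy_cached_disk(cache, "a8f6254f-b867-4967-b4fa-bb70f471f89d", root))
```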
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 596.610073] env[61998]: DEBUG nova.virt.hardware [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 596.610073] env[61998]: DEBUG nova.virt.hardware [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 596.610073] env[61998]: DEBUG nova.virt.hardware [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 596.610528] env[61998]: DEBUG nova.virt.hardware [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 596.610528] env[61998]: DEBUG nova.virt.hardware [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 596.610528] env[61998]: DEBUG nova.virt.hardware [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 596.610528] env[61998]: DEBUG nova.virt.hardware [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 596.611059] env[61998]: DEBUG nova.virt.hardware [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 596.611059] env[61998]: DEBUG nova.virt.hardware [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 596.611059] env[61998]: DEBUG nova.virt.hardware [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 596.611059] env[61998]: DEBUG nova.virt.hardware [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 596.612043] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e38567-2905-40bd-a1c1-2d5cd977227f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.621476] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dfa5c69-3bba-4044-a413-3ccb6e742544 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.750675] env[61998]: DEBUG nova.scheduler.client.report [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 596.784477] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388413, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.794102] env[61998]: DEBUG nova.compute.manager [req-7eb85017-7f3e-472f-bdac-d8f242d1ac6c req-39e2e322-e519-459e-893e-1f02576c37b9 service nova] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Received event network-changed-c969908d-2baa-444f-ad85-d6e514854266 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 596.794319] env[61998]: DEBUG nova.compute.manager [req-7eb85017-7f3e-472f-bdac-d8f242d1ac6c req-39e2e322-e519-459e-893e-1f02576c37b9 service nova] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Refreshing instance network info cache due to event network-changed-c969908d-2baa-444f-ad85-d6e514854266. 
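The nova.virt.hardware run above walks the topology search for one vCPU: `0:0:0` means no flavor or image constraint, the limits then default to 65536 sockets/cores/threads, every factorization of the vCPU count within those caps is enumerated, and for 1 vCPU that leaves exactly `1:1:1`. A condensed sketch of the enumeration (a simplification of the real constraint handling):

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) whose product equals vcpus."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)

print(list(possible_topologies(1)))      # [(1, 1, 1)] -- "Got 1 possible topologies"
print(list(possible_topologies(4))[:3])  # more candidates once vcpus > 1
```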
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 596.794551] env[61998]: DEBUG oslo_concurrency.lockutils [req-7eb85017-7f3e-472f-bdac-d8f242d1ac6c req-39e2e322-e519-459e-893e-1f02576c37b9 service nova] Acquiring lock "refresh_cache-ad2f23df-c067-4d30-b143-e50ebcc50d4e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.794687] env[61998]: DEBUG oslo_concurrency.lockutils [req-7eb85017-7f3e-472f-bdac-d8f242d1ac6c req-39e2e322-e519-459e-893e-1f02576c37b9 service nova] Acquired lock "refresh_cache-ad2f23df-c067-4d30-b143-e50ebcc50d4e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.794852] env[61998]: DEBUG nova.network.neutron [req-7eb85017-7f3e-472f-bdac-d8f242d1ac6c req-39e2e322-e519-459e-893e-1f02576c37b9 service nova] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Refreshing network info cache for port c969908d-2baa-444f-ad85-d6e514854266 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 597.189530] env[61998]: ERROR nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c969908d-2baa-444f-ad85-d6e514854266, please check neutron logs for more information. [ 597.189530] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 597.189530] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 597.189530] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 597.189530] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.189530] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 597.189530] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.189530] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 597.189530] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.189530] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 597.189530] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.189530] env[61998]: ERROR nova.compute.manager raise self.value [ 597.189530] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.189530] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 597.189530] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.189530] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 597.191276] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.191276] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 597.191276] env[61998]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port c969908d-2baa-444f-ad85-d6e514854266, please check neutron logs for more information. [ 597.191276] env[61998]: ERROR nova.compute.manager [ 597.191276] env[61998]: Traceback (most recent call last): [ 597.191276] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 597.191276] env[61998]: listener.cb(fileno) [ 597.191276] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.191276] env[61998]: result = function(*args, **kwargs) [ 597.191276] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 597.191276] env[61998]: return func(*args, **kwargs) [ 597.191276] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 597.191276] env[61998]: raise e [ 597.191276] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 597.191276] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 597.191276] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.191276] env[61998]: created_port_ids = self._update_ports_for_instance( [ 597.191276] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.191276] env[61998]: with excutils.save_and_reraise_exception(): [ 597.191276] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.191276] env[61998]: self.force_reraise() [ 597.191276] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.191276] env[61998]: raise self.value [ 597.191276] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.191276] env[61998]: updated_port = self._update_port( [ 597.191276] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.191276] env[61998]: _ensure_no_port_binding_failure(port) [ 597.191276] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.191276] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 597.192191] env[61998]: nova.exception.PortBindingFailed: Binding failed for port c969908d-2baa-444f-ad85-d6e514854266, please check neutron logs for more information. [ 597.192191] env[61998]: Removing descriptor: 17 [ 597.192191] env[61998]: ERROR nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c969908d-2baa-444f-ad85-d6e514854266, please check neutron logs for more information. 
[ 597.192191] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Traceback (most recent call last): [ 597.192191] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 597.192191] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] yield resources [ 597.192191] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 597.192191] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] self.driver.spawn(context, instance, image_meta, [ 597.192191] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 597.192191] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 597.192191] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 597.192191] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] vm_ref = self.build_virtual_machine(instance, [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] vif_infos = vmwarevif.get_vif_info(self._session, [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] for vif in network_info: [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] return self._sync_wrapper(fn, *args, **kwargs) [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] self.wait() [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] self[:] = self._gt.wait() [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] return self._exit_event.wait() [ 597.192472] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 597.192763] env[61998]: ERROR 
nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] result = hub.switch() [ 597.192763] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 597.192763] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] return self.greenlet.switch() [ 597.192763] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.192763] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] result = function(*args, **kwargs) [ 597.192763] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 597.192763] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] return func(*args, **kwargs) [ 597.192763] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 597.192763] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] raise e [ 597.192763] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 597.192763] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] nwinfo = self.network_api.allocate_for_instance( [ 597.192763] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.192763] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] created_port_ids = self._update_ports_for_instance( [ 597.193076] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.193076] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] with excutils.save_and_reraise_exception(): [ 597.193076] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.193076] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] self.force_reraise() [ 597.193076] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.193076] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] raise self.value [ 597.193076] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.193076] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] updated_port = self._update_port( [ 597.193076] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.193076] 
env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] _ensure_no_port_binding_failure(port) [ 597.193076] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.193076] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] raise exception.PortBindingFailed(port_id=port['id']) [ 597.193346] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] nova.exception.PortBindingFailed: Binding failed for port c969908d-2baa-444f-ad85-d6e514854266, please check neutron logs for more information. [ 597.193346] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] [ 597.193346] env[61998]: INFO nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Terminating instance [ 597.193520] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Acquiring lock "refresh_cache-ad2f23df-c067-4d30-b143-e50ebcc50d4e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.259503] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.260054] env[61998]: DEBUG nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 597.262580] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.258s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.264142] env[61998]: INFO nova.compute.claims [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 597.283256] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388413, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.712785} completed successfully. 
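Both tracebacks above bottom out in _ensure_no_port_binding_failure, which inspects the port Neutron returned and raises PortBindingFailed when the binding came back failed, while excutils.save_and_reraise_exception further up lets cleanup run without losing the original exception. A sketch of both pieces; the `binding:vif_type == 'binding_failed'` check mirrors Neutron's port attribute, and the exception class is a stand-in:

```python
from oslo_utils import excutils  # the same helper the traceback shows

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, please check "
                         "neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron reports a failed binding via the port's binding:vif_type field.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])

def update_port(port):
    try:
        ensure_no_port_binding_failure(port)
    except Exception:
        # Run cleanup, then re-raise the original exception unchanged.
        with excutils.save_and_reraise_exception():
            print("rolling back partially-created ports")

try:
    update_port({"id": "c969908d-2baa-444f-ad85-d6e514854266",
                 "binding:vif_type": "binding_failed"})
except PortBindingFailed as exc:
    print(f"ERROR: {exc}")
```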
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.283256] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] a8f6254f-b867-4967-b4fa-bb70f471f89d/a8f6254f-b867-4967-b4fa-bb70f471f89d.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 597.284173] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 597.284173] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1dc3be8-510d-4b30-b0e8-31f6c134374b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.291786] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 597.291786] env[61998]: value = "task-1388414" [ 597.291786] env[61998]: _type = "Task" [ 597.291786] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.302394] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388414, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.319594] env[61998]: DEBUG nova.network.neutron [req-7eb85017-7f3e-472f-bdac-d8f242d1ac6c req-39e2e322-e519-459e-893e-1f02576c37b9 service nova] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Instance cache missing network info. 
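`Extending root virtual disk to 1048576` above is the flavor's `root_gb=1` converted to KB (1 GiB = 1,048,576 KB), the unit the extend task works in; the cirros template is far smaller, so the disk is grown to match the flavor. The arithmetic and the grow-only guard, as a sketch:

```python
def extend_virtual_disk_if_needed(current_size_kb, root_gb):
    requested_kb = root_gb * 1024 * 1024   # flavor root_gb -> KB, the task's unit
    if requested_kb > current_size_kb:
        print(f"Extending root virtual disk to {requested_kb}")
        return requested_kb
    return current_size_kb                 # never shrink an existing disk

# cirros sparse template is ~21 MB; flavor m1.nano has root_gb=1
print(extend_virtual_disk_if_needed(current_size_kb=20819, root_gb=1))  # 1048576
```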
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.423558] env[61998]: DEBUG nova.network.neutron [req-7eb85017-7f3e-472f-bdac-d8f242d1ac6c req-39e2e322-e519-459e-893e-1f02576c37b9 service nova] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.434083] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 597.434549] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d27049f4-d722-4c8b-b9d8-b68fe104b5f1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.444398] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Waiting for the task: (returnval){ [ 597.444398] env[61998]: value = "task-1388415" [ 597.444398] env[61998]: _type = "Task" [ 597.444398] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.453699] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388415, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.768657] env[61998]: DEBUG nova.compute.utils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 597.772930] env[61998]: DEBUG nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 597.773143] env[61998]: DEBUG nova.network.neutron [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 597.807973] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388414, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.198918} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.809353] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 597.809353] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2712e0b-eb96-4a41-aeba-bf40569355a5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.831851] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] a8f6254f-b867-4967-b4fa-bb70f471f89d/a8f6254f-b867-4967-b4fa-bb70f471f89d.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 597.832792] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae06ea95-9dae-478c-b8e0-436816ff7a6c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.857523] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 597.857523] env[61998]: value = "task-1388416" [ 597.857523] env[61998]: _type = "Task" [ 597.857523] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.858986] env[61998]: DEBUG nova.policy [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b84cd3fd81e46bc92dc56fab06abd23', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c3bc46730f54d79b5a8ea24ff68cffc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 597.869970] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388416, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 597.927671] env[61998]: DEBUG oslo_concurrency.lockutils [req-7eb85017-7f3e-472f-bdac-d8f242d1ac6c req-39e2e322-e519-459e-893e-1f02576c37b9 service nova] Releasing lock "refresh_cache-ad2f23df-c067-4d30-b143-e50ebcc50d4e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 597.928165] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Acquired lock "refresh_cache-ad2f23df-c067-4d30-b143-e50ebcc50d4e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 597.928388] env[61998]: DEBUG nova.network.neutron [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 597.956153] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388415, 'name': PowerOffVM_Task, 'duration_secs': 0.151449} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 597.956566] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 597.956834] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 597.957707] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d98617-bff4-4915-994c-7f65709b3724 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 597.965479] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 597.965683] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c86ac70-499c-45fa-be52-d062767b8762 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 597.994272] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 597.994533] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 597.994736] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Deleting the datastore file [datastore2] e4ada227-b79a-457a-b063-dde99840aa14 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 597.995958] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f374446-f233-4379-b67d-7a6d23e54837 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.004141] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Waiting for the task: (returnval){
[ 598.004141] env[61998]: value = "task-1388418"
[ 598.004141] env[61998]: _type = "Task"
[ 598.004141] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 598.016265] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388418, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 598.263049] env[61998]: DEBUG nova.network.neutron [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Successfully created port: e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 598.274076] env[61998]: DEBUG nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}}
[ 598.374015] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388416, 'name': ReconfigVM_Task, 'duration_secs': 0.469523} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 598.374317] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Reconfigured VM instance instance-00000011 to attach disk [datastore2] a8f6254f-b867-4967-b4fa-bb70f471f89d/a8f6254f-b867-4967-b4fa-bb70f471f89d.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 598.374932] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42e921a7-cfd2-4531-af74-931c93ac521f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.390441] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){
[ 598.390441] env[61998]: value = "task-1388419"
[ 598.390441] env[61998]: _type = "Task"
[ 598.390441] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 598.405149] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388419, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 598.456210] env[61998]: DEBUG nova.network.neutron [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 598.519840] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388418, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176889} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 598.520165] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 598.520471] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 598.520558] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 598.604402] env[61998]: DEBUG nova.network.neutron [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 598.817229] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd107cad-edab-403a-840d-2a65e59df0a5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.826984] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab79d96-11fe-4f91-9a39-eb572e6f0f4f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.868722] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc085aa-592a-4508-9307-76b76f453033 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.874908] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4814e91b-c1e2-47f5-bda0-e4bc6d83601f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.896094] env[61998]: DEBUG nova.compute.provider_tree [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 598.907590] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388419, 'name': Rename_Task, 'duration_secs': 0.150486} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 598.908613] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 598.908867] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-089b66c9-e216-44b9-9dd8-1f9a49996dcd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.916798] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){
[ 598.916798] env[61998]: value = "task-1388420"
[ 598.916798] env[61998]: _type = "Task"
[ 598.916798] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 598.926430] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388420, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 598.968802] env[61998]: DEBUG nova.compute.manager [req-0363ea52-3d3c-4904-981c-2334d76c1bed req-f7d6e822-d9ed-4444-bb47-683c524fd985 service nova] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Received event network-vif-deleted-c969908d-2baa-444f-ad85-d6e514854266 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 599.112539] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Releasing lock "refresh_cache-ad2f23df-c067-4d30-b143-e50ebcc50d4e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 599.114613] env[61998]: DEBUG nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}}
[ 599.114839] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 599.115252] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c5720e4-c2b3-4d75-a672-496da1760831 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 599.131315] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6211e908-461d-46dc-8fb1-03c323bc9400 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 599.157006] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ad2f23df-c067-4d30-b143-e50ebcc50d4e could not be found.
[ 599.157493] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 599.157889] env[61998]: INFO nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 599.157950] env[61998]: DEBUG oslo.service.loopingcall [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 599.158181] env[61998]: DEBUG nova.compute.manager [-] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 599.158274] env[61998]: DEBUG nova.network.neutron [-] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 599.181553] env[61998]: DEBUG nova.network.neutron [-] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 599.288337] env[61998]: DEBUG nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}}
[ 599.321852] env[61998]: DEBUG nova.virt.hardware [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 599.322157] env[61998]: DEBUG nova.virt.hardware [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 599.322317] env[61998]: DEBUG nova.virt.hardware [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 599.322535] env[61998]: DEBUG nova.virt.hardware [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 599.322823] env[61998]: DEBUG nova.virt.hardware [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 599.323160] env[61998]: DEBUG nova.virt.hardware [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 599.323418] env[61998]: DEBUG nova.virt.hardware [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 599.323603] env[61998]: DEBUG nova.virt.hardware [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 599.323849] env[61998]: DEBUG nova.virt.hardware [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 599.323959] env[61998]: DEBUG nova.virt.hardware [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 599.324221] env[61998]: DEBUG nova.virt.hardware [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 599.325380] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb2f0a0-93d3-4db7-a8e7-2d1f9d85e442 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 599.335529] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a9ab39-c967-4c72-8c4a-bbdff81b0267 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 599.387294] env[61998]: ERROR nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c, please check neutron logs for more information.
[ 599.387294] env[61998]: ERROR nova.compute.manager Traceback (most recent call last):
[ 599.387294] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 599.387294] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 599.387294] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 599.387294] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 599.387294] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 599.387294] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 599.387294] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 599.387294] env[61998]: ERROR nova.compute.manager self.force_reraise()
[ 599.387294] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 599.387294] env[61998]: ERROR nova.compute.manager raise self.value
[ 599.387294] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 599.387294] env[61998]: ERROR nova.compute.manager updated_port = self._update_port(
[ 599.387294] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 599.387294] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 599.387725] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 599.387725] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 599.387725] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c, please check neutron logs for more information.
[ 599.387725] env[61998]: ERROR nova.compute.manager
[ 599.387725] env[61998]: Traceback (most recent call last):
[ 599.387725] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 599.387725] env[61998]: listener.cb(fileno)
[ 599.387725] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 599.387725] env[61998]: result = function(*args, **kwargs)
[ 599.387725] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 599.387725] env[61998]: return func(*args, **kwargs)
[ 599.387725] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 599.387725] env[61998]: raise e
[ 599.387725] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 599.387725] env[61998]: nwinfo = self.network_api.allocate_for_instance(
[ 599.387725] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 599.387725] env[61998]: created_port_ids = self._update_ports_for_instance(
[ 599.387725] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 599.387725] env[61998]: with excutils.save_and_reraise_exception():
[ 599.387725] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 599.387725] env[61998]: self.force_reraise()
[ 599.387725] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 599.387725] env[61998]: raise self.value
[ 599.387725] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 599.387725] env[61998]: updated_port = self._update_port(
[ 599.387725] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 599.387725] env[61998]: _ensure_no_port_binding_failure(port)
[ 599.387725] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 599.387725] env[61998]: raise exception.PortBindingFailed(port_id=port['id'])
[ 599.388403] env[61998]: nova.exception.PortBindingFailed: Binding failed for port e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c, please check neutron logs for more information.
[ 599.388403] env[61998]: Removing descriptor: 17
[ 599.388403] env[61998]: ERROR nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c, please check neutron logs for more information.
[ 599.388403] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Traceback (most recent call last): [ 599.388403] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 599.388403] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] yield resources [ 599.388403] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 599.388403] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] self.driver.spawn(context, instance, image_meta, [ 599.388403] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 599.388403] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 599.388403] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 599.388403] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] vm_ref = self.build_virtual_machine(instance, [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] for vif in network_info: [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] return self._sync_wrapper(fn, *args, **kwargs) [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] self.wait() [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] self[:] = self._gt.wait() [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] return self._exit_event.wait() [ 599.388683] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 599.388981] env[61998]: ERROR 
nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] result = hub.switch() [ 599.388981] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 599.388981] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] return self.greenlet.switch() [ 599.388981] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 599.388981] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] result = function(*args, **kwargs) [ 599.388981] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 599.388981] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] return func(*args, **kwargs) [ 599.388981] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 599.388981] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] raise e [ 599.388981] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 599.388981] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] nwinfo = self.network_api.allocate_for_instance( [ 599.388981] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 599.388981] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] created_port_ids = self._update_ports_for_instance( [ 599.390112] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 599.390112] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] with excutils.save_and_reraise_exception(): [ 599.390112] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.390112] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] self.force_reraise() [ 599.390112] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.390112] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] raise self.value [ 599.390112] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 599.390112] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] updated_port = self._update_port( [ 599.390112] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.390112] 
env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] _ensure_no_port_binding_failure(port) [ 599.390112] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.390112] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] raise exception.PortBindingFailed(port_id=port['id']) [ 599.390386] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] nova.exception.PortBindingFailed: Binding failed for port e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c, please check neutron logs for more information. [ 599.390386] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] [ 599.390386] env[61998]: INFO nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Terminating instance [ 599.390971] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Acquiring lock "refresh_cache-e37ac276-8a3e-45b3-8176-d972eb4e6e3e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.391149] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Acquired lock "refresh_cache-e37ac276-8a3e-45b3-8176-d972eb4e6e3e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.391350] env[61998]: DEBUG nova.network.neutron [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 599.400491] env[61998]: DEBUG nova.scheduler.client.report [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 599.431660] env[61998]: DEBUG oslo_vmware.api [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388420, 'name': PowerOnVM_Task, 'duration_secs': 0.449897} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.431928] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 599.435298] env[61998]: INFO nova.compute.manager [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Took 5.42 seconds to spawn the instance on the hypervisor. [ 599.435511] env[61998]: DEBUG nova.compute.manager [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 599.436348] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693868d6-1641-4e80-b70f-e456b402a46c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.553747] env[61998]: DEBUG nova.virt.hardware [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 599.553998] env[61998]: DEBUG nova.virt.hardware [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 599.554169] env[61998]: DEBUG nova.virt.hardware [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 599.554350] env[61998]: DEBUG nova.virt.hardware [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 599.554494] env[61998]: DEBUG nova.virt.hardware [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 599.554635] env[61998]: DEBUG nova.virt.hardware [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 599.554832] env[61998]: DEBUG nova.virt.hardware [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 599.554986] env[61998]: DEBUG nova.virt.hardware [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 599.555239] env[61998]: DEBUG nova.virt.hardware [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 599.555383] env[61998]: DEBUG nova.virt.hardware [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 599.555677] env[61998]: DEBUG nova.virt.hardware [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 599.556824] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072ff169-836a-4c36-bebd-66c4c74e0f21 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.565563] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c33417d-7b97-4f36-90a1-6b49fbbbc8ba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.579915] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Instance VIF info [] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 599.585401] env[61998]: DEBUG oslo.service.loopingcall [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 599.585638] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 599.585841] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-461d8f31-cf6f-4edf-af12-4ddcaeed88cc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.604472] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 599.604472] env[61998]: value = "task-1388421" [ 599.604472] env[61998]: _type = "Task" [ 599.604472] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.614664] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388421, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.684782] env[61998]: DEBUG nova.network.neutron [-] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.905586] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.906691] env[61998]: DEBUG nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 599.908904] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.711s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.910829] env[61998]: INFO nova.compute.claims [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 599.915970] env[61998]: DEBUG nova.network.neutron [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.959569] env[61998]: INFO nova.compute.manager [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Took 28.92 seconds to build instance. [ 600.015833] env[61998]: DEBUG nova.network.neutron [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.115429] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388421, 'name': CreateVM_Task, 'duration_secs': 0.313177} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.115651] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 600.116132] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.116323] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.116686] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 600.116969] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-341d48b8-55bc-494e-bd98-c1217fc59609 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.122941] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Waiting for the task: (returnval){ [ 600.122941] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52bd51a9-9ecc-2c9d-fd79-f758e02b7533" [ 600.122941] env[61998]: _type = "Task" [ 600.122941] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.133027] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52bd51a9-9ecc-2c9d-fd79-f758e02b7533, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.188688] env[61998]: INFO nova.compute.manager [-] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Took 1.03 seconds to deallocate network for instance. [ 600.191955] env[61998]: DEBUG nova.compute.claims [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 600.192819] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.418020] env[61998]: DEBUG nova.compute.utils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 600.423212] env[61998]: DEBUG nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 600.423212] env[61998]: DEBUG nova.network.neutron [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 600.461033] env[61998]: DEBUG oslo_concurrency.lockutils [None req-31cce176-bb05-4b5b-9842-04078334c15c tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lock "a8f6254f-b867-4967-b4fa-bb70f471f89d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.430s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.467127] env[61998]: DEBUG nova.policy [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ea7bc069037447c4bd9451e0f9106689', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad0aa62c68274ebd9dc50e07a08a3e76', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 600.519072] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Releasing lock 
"refresh_cache-e37ac276-8a3e-45b3-8176-d972eb4e6e3e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.522258] env[61998]: DEBUG nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 600.522258] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 600.522258] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f3ae156-c693-4916-8756-42d1995876d8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.531544] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6452b9f0-79dd-45b1-bc37-855d34910c35 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.566638] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e37ac276-8a3e-45b3-8176-d972eb4e6e3e could not be found. [ 600.566887] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 600.567211] env[61998]: INFO nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 600.567397] env[61998]: DEBUG oslo.service.loopingcall [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 600.567791] env[61998]: DEBUG nova.compute.manager [-] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 600.567791] env[61998]: DEBUG nova.network.neutron [-] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 600.588633] env[61998]: DEBUG nova.network.neutron [-] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.603647] env[61998]: INFO nova.compute.manager [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Rebuilding instance [ 600.639956] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52bd51a9-9ecc-2c9d-fd79-f758e02b7533, 'name': SearchDatastore_Task, 'duration_secs': 0.015987} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.643876] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.644175] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.644380] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.644546] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.644687] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 600.645176] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c938ace3-23e5-4610-bf04-06ab6ebcd485 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.655259] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 600.655259] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 
tempest-ServersAdmin275Test-481212997-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 600.656030] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79f32748-c4fd-4aa2-a5fe-41c16883a848 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.666145] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Waiting for the task: (returnval){ [ 600.666145] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52234433-3def-3b37-d45d-1c9abae2accb" [ 600.666145] env[61998]: _type = "Task" [ 600.666145] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.679177] env[61998]: DEBUG nova.compute.manager [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 600.679504] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52234433-3def-3b37-d45d-1c9abae2accb, 'name': SearchDatastore_Task, 'duration_secs': 0.011793} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.680217] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193bf67f-fc78-47f8-8dc8-341e7aa12f74 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.684604] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b5f624c-334d-4e2c-89e8-3016390a6f14 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.694995] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Waiting for the task: (returnval){ [ 600.694995] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52d89835-7564-0d26-7027-af5031cd828c" [ 600.694995] env[61998]: _type = "Task" [ 600.694995] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.705655] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52d89835-7564-0d26-7027-af5031cd828c, 'name': SearchDatastore_Task, 'duration_secs': 0.008422} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.706554] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.706554] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] e4ada227-b79a-457a-b063-dde99840aa14/e4ada227-b79a-457a-b063-dde99840aa14.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 600.706554] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0bbba8c2-9d76-4df8-b6a4-a05d226691c9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.714094] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Waiting for the task: (returnval){ [ 600.714094] env[61998]: value = "task-1388422" [ 600.714094] env[61998]: _type = "Task" [ 600.714094] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.722841] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388422, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.922859] env[61998]: DEBUG nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 600.954680] env[61998]: DEBUG nova.network.neutron [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Successfully created port: bc42264c-bfda-4aab-8e36-54a4ffacdb53 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 600.965157] env[61998]: DEBUG nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 601.081853] env[61998]: DEBUG nova.compute.manager [req-f83e089e-5588-4747-b809-75a26d02f5ee req-739d3009-08e7-4ab4-9d0f-713c554d4b43 service nova] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Received event network-changed-e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 601.082069] env[61998]: DEBUG nova.compute.manager [req-f83e089e-5588-4747-b809-75a26d02f5ee req-739d3009-08e7-4ab4-9d0f-713c554d4b43 service nova] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Refreshing instance network info cache due to event network-changed-e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 601.083033] env[61998]: DEBUG oslo_concurrency.lockutils [req-f83e089e-5588-4747-b809-75a26d02f5ee req-739d3009-08e7-4ab4-9d0f-713c554d4b43 service nova] Acquiring lock "refresh_cache-e37ac276-8a3e-45b3-8176-d972eb4e6e3e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.083033] env[61998]: DEBUG oslo_concurrency.lockutils [req-f83e089e-5588-4747-b809-75a26d02f5ee req-739d3009-08e7-4ab4-9d0f-713c554d4b43 service nova] Acquired lock "refresh_cache-e37ac276-8a3e-45b3-8176-d972eb4e6e3e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.083210] env[61998]: DEBUG nova.network.neutron [req-f83e089e-5588-4747-b809-75a26d02f5ee req-739d3009-08e7-4ab4-9d0f-713c554d4b43 service nova] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Refreshing network info cache for port e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 601.090270] env[61998]: DEBUG nova.network.neutron [-] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.229876] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388422, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.501210] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.528432] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e19d84c-6eb7-46aa-8500-f7a6e2638cb2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.164870] env[61998]: INFO nova.compute.manager [-] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Took 1.60 seconds to deallocate network for instance. 
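[editorial note] The SearchDatastore_Task / CopyVirtualDisk_Task entries above follow oslo.vmware's invoke-then-poll pattern: the SOAP call returns a task managed-object reference immediately ("Waiting for the task: (returnval){ ... _type = "Task" }"), and wait_for_task then polls it, producing the recurring "progress is N%" lines, until vCenter reports success or failure. A minimal sketch of that pattern — not taken from this log; the vCenter endpoint, credentials, and VMDK paths are placeholders:

    # Minimal sketch of the oslo.vmware invoke-then-poll pattern seen above.
    # Endpoint, credentials, and datastore paths are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.org', 'admin', 'secret',  # placeholder host/credentials
        api_retry_count=10,
        task_poll_interval=0.5)  # seconds between "progress is N%" polls

    # invoke_api() issues the SOAP request (here CopyVirtualDisk_Task) and
    # returns a task reference without waiting for the copy to finish.
    # (Against vCenter the source/destination datacenter morefs are also
    # passed, as Nova's vm_util does; omitted here for brevity.)
    task_ref = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore2] cache/src.vmdk',  # placeholder paths
        destName='[datastore2] inst/dst.vmdk')

    # wait_for_task() blocks, polling the task and logging its progress,
    # until the task succeeds or raises on error/cancellation.
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)  # 'success' on completion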
[ 602.165649] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 602.166728] env[61998]: DEBUG nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 602.171737] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4aab5718-8fe9-4eca-858d-924297fae455 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.177485] env[61998]: DEBUG nova.compute.claims [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 602.177485] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.182541] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed6313f-50b1-421f-a045-359821ee4380 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.188320] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 602.188320] env[61998]: value = "task-1388423" [ 602.188320] env[61998]: _type = "Task" [ 602.188320] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.219347] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388422, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.821644} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.220472] env[61998]: DEBUG nova.network.neutron [req-f83e089e-5588-4747-b809-75a26d02f5ee req-739d3009-08e7-4ab4-9d0f-713c554d4b43 service nova] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.224157] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] e4ada227-b79a-457a-b063-dde99840aa14/e4ada227-b79a-457a-b063-dde99840aa14.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 602.224856] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 602.225940] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93367fdf-46af-43cc-9774-b0afec07c108 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.232472] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8e892fc-ad57-4831-bc0c-804f5cc66780 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.233504] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388423, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.241419] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55352f9c-15af-4854-b08a-787c906827e7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.246393] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Waiting for the task: (returnval){ [ 602.246393] env[61998]: value = "task-1388424" [ 602.246393] env[61998]: _type = "Task" [ 602.246393] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.249337] env[61998]: DEBUG nova.virt.hardware [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 602.249717] env[61998]: DEBUG nova.virt.hardware [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 602.249784] env[61998]: DEBUG nova.virt.hardware [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.249922] env[61998]: DEBUG nova.virt.hardware [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 602.250078] env[61998]: DEBUG nova.virt.hardware [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.250225] env[61998]: DEBUG nova.virt.hardware [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 602.250433] env[61998]: DEBUG nova.virt.hardware [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 602.250581] env[61998]: DEBUG nova.virt.hardware [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 602.250742] 
env[61998]: DEBUG nova.virt.hardware [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 602.250896] env[61998]: DEBUG nova.virt.hardware [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 602.251080] env[61998]: DEBUG nova.virt.hardware [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 602.251964] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93aece3-5028-45fb-8556-fb393ef46440 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.266131] env[61998]: DEBUG nova.compute.provider_tree [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.276105] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9fa365-6938-4299-9e79-1e7541f5bec9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.280086] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388424, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.352551] env[61998]: DEBUG nova.network.neutron [req-f83e089e-5588-4747-b809-75a26d02f5ee req-739d3009-08e7-4ab4-9d0f-713c554d4b43 service nova] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.587697] env[61998]: ERROR nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bc42264c-bfda-4aab-8e36-54a4ffacdb53, please check neutron logs for more information. 
[ 602.587697] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 602.587697] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 602.587697] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 602.587697] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 602.587697] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 602.587697] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 602.587697] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 602.587697] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.587697] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 602.587697] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.587697] env[61998]: ERROR nova.compute.manager raise self.value [ 602.587697] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 602.587697] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 602.587697] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.587697] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 602.588330] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.588330] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 602.588330] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bc42264c-bfda-4aab-8e36-54a4ffacdb53, please check neutron logs for more information. 
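[editorial note] The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294): Neutron marks a port it could not bind by setting the port's binding:vif_type to 'binding_failed', and Nova converts that marker into PortBindingFailed, which aborts the network allocation. The tracebacks that follow below are the same exception surfacing again through the eventlet greenthread and the instance build path. A paraphrased, self-contained sketch of the check, with names simplified from the Nova source:

    # Paraphrased sketch of the check where the traceback above ends
    # (nova/network/neutron.py, _ensure_no_port_binding_failure).
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron sets on a failed binding

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f'Binding failed for port {port_id}, please '
                             'check neutron logs for more information.')

    def ensure_no_port_binding_failure(port: dict) -> None:
        # After updating a port, Nova inspects the binding state Neutron
        # returned; vif_type 'binding_failed' means no mechanism driver
        # could bind the port, so the spawn must be aborted.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port['id'])

    # The port in this log would look roughly like:
    # ensure_no_port_binding_failure(
    #     {'id': 'bc42264c-bfda-4aab-8e36-54a4ffacdb53',
    #      'binding:vif_type': 'binding_failed'})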
[ 602.588330] env[61998]: ERROR nova.compute.manager [ 602.588330] env[61998]: Traceback (most recent call last): [ 602.588330] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 602.588330] env[61998]: listener.cb(fileno) [ 602.588330] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.588330] env[61998]: result = function(*args, **kwargs) [ 602.588330] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 602.588330] env[61998]: return func(*args, **kwargs) [ 602.588330] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 602.588330] env[61998]: raise e [ 602.588330] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 602.588330] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 602.588330] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 602.588330] env[61998]: created_port_ids = self._update_ports_for_instance( [ 602.588330] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 602.588330] env[61998]: with excutils.save_and_reraise_exception(): [ 602.588330] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.588330] env[61998]: self.force_reraise() [ 602.588330] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.588330] env[61998]: raise self.value [ 602.588330] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 602.588330] env[61998]: updated_port = self._update_port( [ 602.588330] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.588330] env[61998]: _ensure_no_port_binding_failure(port) [ 602.588330] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.588330] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 602.588926] env[61998]: nova.exception.PortBindingFailed: Binding failed for port bc42264c-bfda-4aab-8e36-54a4ffacdb53, please check neutron logs for more information. [ 602.588926] env[61998]: Removing descriptor: 17 [ 602.588926] env[61998]: ERROR nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bc42264c-bfda-4aab-8e36-54a4ffacdb53, please check neutron logs for more information. 
[ 602.588926] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Traceback (most recent call last): [ 602.588926] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 602.588926] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] yield resources [ 602.588926] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 602.588926] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] self.driver.spawn(context, instance, image_meta, [ 602.588926] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 602.588926] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 602.588926] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 602.588926] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] vm_ref = self.build_virtual_machine(instance, [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] vif_infos = vmwarevif.get_vif_info(self._session, [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] for vif in network_info: [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] return self._sync_wrapper(fn, *args, **kwargs) [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] self.wait() [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] self[:] = self._gt.wait() [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] return self._exit_event.wait() [ 602.589192] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 602.589470] env[61998]: ERROR 
nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] result = hub.switch() [ 602.589470] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 602.589470] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] return self.greenlet.switch() [ 602.589470] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.589470] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] result = function(*args, **kwargs) [ 602.589470] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 602.589470] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] return func(*args, **kwargs) [ 602.589470] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 602.589470] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] raise e [ 602.589470] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 602.589470] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] nwinfo = self.network_api.allocate_for_instance( [ 602.589470] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 602.589470] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] created_port_ids = self._update_ports_for_instance( [ 602.589748] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 602.589748] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] with excutils.save_and_reraise_exception(): [ 602.589748] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.589748] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] self.force_reraise() [ 602.589748] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.589748] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] raise self.value [ 602.589748] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 602.589748] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] updated_port = self._update_port( [ 602.589748] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.589748] 
env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] _ensure_no_port_binding_failure(port) [ 602.589748] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.589748] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] raise exception.PortBindingFailed(port_id=port['id']) [ 602.589993] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] nova.exception.PortBindingFailed: Binding failed for port bc42264c-bfda-4aab-8e36-54a4ffacdb53, please check neutron logs for more information. [ 602.589993] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] [ 602.589993] env[61998]: INFO nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Terminating instance [ 602.592167] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquiring lock "refresh_cache-975b0c65-6f57-4c7c-ae46-b23920a039f7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.592372] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquired lock "refresh_cache-975b0c65-6f57-4c7c-ae46-b23920a039f7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.592570] env[61998]: DEBUG nova.network.neutron [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 602.700534] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388423, 'name': PowerOffVM_Task, 'duration_secs': 0.130848} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.701054] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 602.701487] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 602.702525] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53920c6-20f3-4bcc-8c23-6d0400f2a399 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.711774] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 602.711774] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-995443ce-ddaf-4a02-9fe7-17b429ec0b01 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.741948] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 602.741948] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 602.741948] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Deleting the datastore file [datastore2] a8f6254f-b867-4967-b4fa-bb70f471f89d {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 602.741948] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57b34052-2444-4938-9cf9-6da5850c9ccd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.750232] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 602.750232] env[61998]: value = "task-1388426" [ 602.750232] env[61998]: _type = "Task" [ 602.750232] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.762612] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388426, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.766294] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388424, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102761} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.766694] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 602.768240] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e2fb9a-77af-4dcf-bf6a-9c575bebf9bf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.771159] env[61998]: DEBUG nova.scheduler.client.report [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 602.803940] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] e4ada227-b79a-457a-b063-dde99840aa14/e4ada227-b79a-457a-b063-dde99840aa14.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 602.805026] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b9f3920-8d40-4016-b411-229462cf5b8e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.834815] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Waiting for the task: (returnval){ [ 602.834815] env[61998]: value = "task-1388427" [ 602.834815] env[61998]: _type = "Task" [ 602.834815] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.845567] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388427, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.859221] env[61998]: DEBUG oslo_concurrency.lockutils [req-f83e089e-5588-4747-b809-75a26d02f5ee req-739d3009-08e7-4ab4-9d0f-713c554d4b43 service nova] Releasing lock "refresh_cache-e37ac276-8a3e-45b3-8176-d972eb4e6e3e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.860498] env[61998]: DEBUG nova.compute.manager [req-f83e089e-5588-4747-b809-75a26d02f5ee req-739d3009-08e7-4ab4-9d0f-713c554d4b43 service nova] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Received event network-vif-deleted-e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 603.115734] env[61998]: DEBUG nova.network.neutron [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.135821] env[61998]: DEBUG nova.compute.manager [req-e3f42a87-b541-4abc-bc14-77f2d5f5d7c8 req-cc38f8d0-dce1-4a21-8eac-8a564109ed40 service nova] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Received event network-changed-bc42264c-bfda-4aab-8e36-54a4ffacdb53 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 603.136193] env[61998]: DEBUG nova.compute.manager [req-e3f42a87-b541-4abc-bc14-77f2d5f5d7c8 req-cc38f8d0-dce1-4a21-8eac-8a564109ed40 service nova] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Refreshing instance network info cache due to event network-changed-bc42264c-bfda-4aab-8e36-54a4ffacdb53. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 603.136883] env[61998]: DEBUG oslo_concurrency.lockutils [req-e3f42a87-b541-4abc-bc14-77f2d5f5d7c8 req-cc38f8d0-dce1-4a21-8eac-8a564109ed40 service nova] Acquiring lock "refresh_cache-975b0c65-6f57-4c7c-ae46-b23920a039f7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.262611] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388426, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101789} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.263059] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 603.263374] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 603.263673] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 603.280021] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.369s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.280021] env[61998]: DEBUG nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 603.281226] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.324s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.348277] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388427, 'name': ReconfigVM_Task, 'duration_secs': 0.308645} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.351024] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Reconfigured VM instance instance-0000000d to attach disk [datastore2] e4ada227-b79a-457a-b063-dde99840aa14/e4ada227-b79a-457a-b063-dde99840aa14.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 603.352758] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2baba0b5-e4a4-47ec-9b09-afd138638c36 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.358844] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Acquiring lock "dce49aac-03f3-48ed-9bad-c5eb2d779bae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.359470] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Lock "dce49aac-03f3-48ed-9bad-c5eb2d779bae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.366199] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Waiting for the task: (returnval){ [ 603.366199] env[61998]: value = "task-1388428" [ 603.366199] env[61998]: _type = "Task" [ 603.366199] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.375725] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388428, 'name': Rename_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.427587] env[61998]: DEBUG nova.network.neutron [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.786549] env[61998]: DEBUG nova.compute.utils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.792020] env[61998]: DEBUG nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 603.792020] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.872690] env[61998]: DEBUG nova.policy [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6423d0a86724e239792039797cf44f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f9d84ad259f469881b6d87317f7c26c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 603.883389] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388428, 'name': Rename_Task, 'duration_secs': 0.152075} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.883662] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 603.883897] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c46b632-a283-4571-aa34-8e949654edc9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.895413] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Waiting for the task: (returnval){ [ 603.895413] env[61998]: value = "task-1388429" [ 603.895413] env[61998]: _type = "Task" [ 603.895413] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.905072] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388429, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.930954] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Releasing lock "refresh_cache-975b0c65-6f57-4c7c-ae46-b23920a039f7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.931509] env[61998]: DEBUG nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 603.931713] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 603.932120] env[61998]: DEBUG oslo_concurrency.lockutils [req-e3f42a87-b541-4abc-bc14-77f2d5f5d7c8 req-cc38f8d0-dce1-4a21-8eac-8a564109ed40 service nova] Acquired lock "refresh_cache-975b0c65-6f57-4c7c-ae46-b23920a039f7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.932200] env[61998]: DEBUG nova.network.neutron [req-e3f42a87-b541-4abc-bc14-77f2d5f5d7c8 req-cc38f8d0-dce1-4a21-8eac-8a564109ed40 service nova] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Refreshing network info cache for port bc42264c-bfda-4aab-8e36-54a4ffacdb53 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 603.933330] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-460e81ba-10d4-44ea-bd70-a56f72d04bd3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.950024] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076226ce-443e-481d-9da0-838b8fd58845 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.977214] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 975b0c65-6f57-4c7c-ae46-b23920a039f7 could not be found. [ 603.977214] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 603.977214] env[61998]: INFO nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 603.977214] env[61998]: DEBUG oslo.service.loopingcall [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 603.979563] env[61998]: DEBUG nova.compute.manager [-] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 603.979871] env[61998]: DEBUG nova.network.neutron [-] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 604.003307] env[61998]: DEBUG nova.network.neutron [-] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.294313] env[61998]: DEBUG nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 604.315018] env[61998]: DEBUG nova.virt.hardware [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 604.315018] env[61998]: DEBUG nova.virt.hardware [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 604.315018] env[61998]: DEBUG nova.virt.hardware [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.315018] env[61998]: DEBUG nova.virt.hardware [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 604.315355] env[61998]: DEBUG nova.virt.hardware [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.315355] env[61998]: DEBUG nova.virt.hardware [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 
tempest-ServerShowV257Test-44528303-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 604.315355] env[61998]: DEBUG nova.virt.hardware [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 604.316458] env[61998]: DEBUG nova.virt.hardware [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 604.316853] env[61998]: DEBUG nova.virt.hardware [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 604.318067] env[61998]: DEBUG nova.virt.hardware [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 604.318405] env[61998]: DEBUG nova.virt.hardware [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 604.319406] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675cc2da-1383-47fc-9c65-32ddb1829181 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.333103] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c0f84d-12f2-4072-8aa1-c3e20ccfc1c4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.357529] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Instance VIF info [] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 604.363488] env[61998]: DEBUG oslo.service.loopingcall [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 604.366405] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 604.368020] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a2bbf83-2e90-4b81-beeb-d8b727ff2c47 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.380060] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Successfully created port: 3fd67133-0c2c-4c5e-88a3-769cf0a6176e {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.384853] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b799fe34-ee6b-48a2-851b-ebe1e750853d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.391019] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 604.391019] env[61998]: value = "task-1388430" [ 604.391019] env[61998]: _type = "Task" [ 604.391019] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.395262] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac666ff-50d8-40df-9826-eaeadf393ea5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.404595] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388430, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.410961] env[61998]: DEBUG oslo_vmware.api [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Task: {'id': task-1388429, 'name': PowerOnVM_Task, 'duration_secs': 0.463013} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.434606] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 604.434863] env[61998]: DEBUG nova.compute.manager [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 604.438961] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11df9d50-956b-40ae-85d9-86584838a790 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.439717] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a57b2c8-38d0-4b2f-b835-389a41f551ef {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.455114] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0152b2d-3f3c-4e74-9ac3-da84315f1d2b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.460509] env[61998]: DEBUG nova.network.neutron [req-e3f42a87-b541-4abc-bc14-77f2d5f5d7c8 req-cc38f8d0-dce1-4a21-8eac-8a564109ed40 service nova] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.472038] env[61998]: DEBUG nova.compute.provider_tree [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.509249] env[61998]: DEBUG nova.network.neutron [-] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.575450] env[61998]: DEBUG nova.network.neutron [req-e3f42a87-b541-4abc-bc14-77f2d5f5d7c8 req-cc38f8d0-dce1-4a21-8eac-8a564109ed40 service nova] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.882360] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Successfully created port: ea489904-5338-470e-aa60-81fa54816224 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.899925] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388430, 'name': CreateVM_Task, 'duration_secs': 0.256653} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.900113] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 604.904020] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.904020] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.904020] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 604.904020] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89ad6b1d-594e-491f-ad59-b3db579e76a8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.907713] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 604.907713] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52706155-bcba-c32b-16fd-bfaf0b8efe8a" [ 604.907713] env[61998]: _type = "Task" [ 604.907713] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.916294] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52706155-bcba-c32b-16fd-bfaf0b8efe8a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.963153] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.975441] env[61998]: DEBUG nova.scheduler.client.report [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 605.008808] env[61998]: INFO nova.compute.manager [-] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Took 1.03 seconds to deallocate network for instance. [ 605.011196] env[61998]: DEBUG nova.compute.claims [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 605.011415] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.079206] env[61998]: DEBUG oslo_concurrency.lockutils [req-e3f42a87-b541-4abc-bc14-77f2d5f5d7c8 req-cc38f8d0-dce1-4a21-8eac-8a564109ed40 service nova] Releasing lock "refresh_cache-975b0c65-6f57-4c7c-ae46-b23920a039f7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.079484] env[61998]: DEBUG nova.compute.manager [req-e3f42a87-b541-4abc-bc14-77f2d5f5d7c8 req-cc38f8d0-dce1-4a21-8eac-8a564109ed40 service nova] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Received event network-vif-deleted-bc42264c-bfda-4aab-8e36-54a4ffacdb53 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 605.304171] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Successfully created port: 453cfde8-da44-4337-9782-0c4ef8c8bff1 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 605.312889] env[61998]: DEBUG nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 605.334541] env[61998]: DEBUG nova.virt.hardware [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 605.334799] env[61998]: DEBUG nova.virt.hardware [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 605.335016] env[61998]: DEBUG nova.virt.hardware [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.335509] env[61998]: DEBUG nova.virt.hardware [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 605.335764] env[61998]: DEBUG nova.virt.hardware [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.336081] env[61998]: DEBUG nova.virt.hardware [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 605.336270] env[61998]: DEBUG nova.virt.hardware [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 605.336853] env[61998]: DEBUG nova.virt.hardware [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 605.337552] env[61998]: DEBUG nova.virt.hardware [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 
tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 605.337812] env[61998]: DEBUG nova.virt.hardware [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 605.338044] env[61998]: DEBUG nova.virt.hardware [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 605.339124] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af16b64-bbf1-4160-994c-668385bf3131 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.349421] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba51758-4c36-4a95-97c9-75153ee0cd40 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.419362] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52706155-bcba-c32b-16fd-bfaf0b8efe8a, 'name': SearchDatastore_Task, 'duration_secs': 0.01979} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.419722] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.421215] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 605.421215] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.421215] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.421215] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 605.421215] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9760ca03-e711-4668-8186-f414d2cae27e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.432221] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 605.432221] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 605.432817] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72cfd64b-4895-444f-af1d-ee2212068678 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.439015] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 605.439015] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52f4ecc4-deaf-f534-92c3-6c18b4ba8588" [ 605.439015] env[61998]: _type = "Task" [ 605.439015] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.449864] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52f4ecc4-deaf-f534-92c3-6c18b4ba8588, 'name': SearchDatastore_Task, 'duration_secs': 0.008628} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.451045] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5a103b5-e926-4bee-8507-b874fed07151 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.457993] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 605.457993] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]524aecd1-4517-813d-82c2-7797fef6a578" [ 605.457993] env[61998]: _type = "Task" [ 605.457993] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.468009] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]524aecd1-4517-813d-82c2-7797fef6a578, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.480891] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.200s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.484330] env[61998]: ERROR nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b60cebfb-028c-4c99-b037-df42b62daac9, please check neutron logs for more information. 
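The PortBindingFailed error logged above, and the traceback that follows it, originate in the check at nova/network/neutron.py:294 named in the stack frames. A minimal sketch of that check, reconstructed from the logged call chain (the sentinel string and the exception class here are stand-ins inferred from the log, not a verbatim copy of Nova's source):

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check "
                             "neutron logs for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding through the port's
        # binding:vif_type attribute rather than through an API error,
        # so the caller has to inspect the port after create/update.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    _ensure_no_port_binding_failure({'id': 'p1', 'binding:vif_type': 'ovs'})
    try:
        _ensure_no_port_binding_failure(
            {'id': 'p2', 'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # Binding failed for port p2, please check neutron logs ...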
[ 605.484330] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Traceback (most recent call last): [ 605.484330] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 605.484330] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] self.driver.spawn(context, instance, image_meta, [ 605.484330] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 605.484330] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 605.484330] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 605.484330] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] vm_ref = self.build_virtual_machine(instance, [ 605.484330] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 605.484330] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] vif_infos = vmwarevif.get_vif_info(self._session, [ 605.484330] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] for vif in network_info: [ 605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] return self._sync_wrapper(fn, *args, **kwargs) [ 605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] self.wait() [ 605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] self[:] = self._gt.wait() [ 605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] return self._exit_event.wait() [ 605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] result = hub.switch() [ 605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
605.484581] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] return self.greenlet.switch() [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] result = function(*args, **kwargs) [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] return func(*args, **kwargs) [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] raise e [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] nwinfo = self.network_api.allocate_for_instance( [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] created_port_ids = self._update_ports_for_instance( [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] with excutils.save_and_reraise_exception(): [ 605.484860] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.485165] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] self.force_reraise() [ 605.485165] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.485165] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] raise self.value [ 605.485165] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.485165] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] updated_port = self._update_port( [ 605.485165] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.485165] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] _ensure_no_port_binding_failure(port) [ 605.485165] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 605.485165] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] raise exception.PortBindingFailed(port_id=port['id']) [ 605.485165] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] nova.exception.PortBindingFailed: Binding failed for port b60cebfb-028c-4c99-b037-df42b62daac9, please check neutron logs for more information. [ 605.485165] env[61998]: ERROR nova.compute.manager [instance: ef129347-9ea0-4615-b897-f51e664da1a7] [ 605.485410] env[61998]: DEBUG nova.compute.utils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Binding failed for port b60cebfb-028c-4c99-b037-df42b62daac9, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 605.485410] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.608s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.486512] env[61998]: INFO nova.compute.claims [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.489842] env[61998]: DEBUG nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Build of instance ef129347-9ea0-4615-b897-f51e664da1a7 was re-scheduled: Binding failed for port b60cebfb-028c-4c99-b037-df42b62daac9, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 605.490354] env[61998]: DEBUG nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 605.490617] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "refresh_cache-ef129347-9ea0-4615-b897-f51e664da1a7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.490808] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquired lock "refresh_cache-ef129347-9ea0-4615-b897-f51e664da1a7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.491008] env[61998]: DEBUG nova.network.neutron [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.969655] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]524aecd1-4517-813d-82c2-7797fef6a578, 'name': SearchDatastore_Task, 'duration_secs': 0.009056} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.970076] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.970367] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] a8f6254f-b867-4967-b4fa-bb70f471f89d/a8f6254f-b867-4967-b4fa-bb70f471f89d.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 605.971018] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d896e73-f651-4302-b8e7-445f4b3788c5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.980107] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 605.980107] env[61998]: value = "task-1388431" [ 605.980107] env[61998]: _type = "Task" [ 605.980107] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.990670] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388431, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.014650] env[61998]: DEBUG nova.network.neutron [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.132313] env[61998]: DEBUG nova.network.neutron [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.386835] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquiring lock "e4ada227-b79a-457a-b063-dde99840aa14" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.387108] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lock "e4ada227-b79a-457a-b063-dde99840aa14" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.387370] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquiring lock "e4ada227-b79a-457a-b063-dde99840aa14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.387997] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lock "e4ada227-b79a-457a-b063-dde99840aa14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.390509] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lock "e4ada227-b79a-457a-b063-dde99840aa14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.393030] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Acquiring lock "87f859c6-7a96-4a48-adb8-814a134ad4c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.393030] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Lock "87f859c6-7a96-4a48-adb8-814a134ad4c8" acquired by
"nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.393503] env[61998]: INFO nova.compute.manager [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Terminating instance [ 606.395510] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquiring lock "refresh_cache-e4ada227-b79a-457a-b063-dde99840aa14" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.395674] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquired lock "refresh_cache-e4ada227-b79a-457a-b063-dde99840aa14" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.395828] env[61998]: DEBUG nova.network.neutron [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.430033] env[61998]: DEBUG nova.compute.manager [req-0c06ae6e-cfde-4869-bef7-5065529ac0e6 req-a11c1de3-4c92-4b3b-9ad4-2b99e9e2c141 service nova] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Received event network-changed-3fd67133-0c2c-4c5e-88a3-769cf0a6176e {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 606.430342] env[61998]: DEBUG nova.compute.manager [req-0c06ae6e-cfde-4869-bef7-5065529ac0e6 req-a11c1de3-4c92-4b3b-9ad4-2b99e9e2c141 service nova] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Refreshing instance network info cache due to event network-changed-3fd67133-0c2c-4c5e-88a3-769cf0a6176e.
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 606.430687] env[61998]: DEBUG oslo_concurrency.lockutils [req-0c06ae6e-cfde-4869-bef7-5065529ac0e6 req-a11c1de3-4c92-4b3b-9ad4-2b99e9e2c141 service nova] Acquiring lock "refresh_cache-6236c44a-e3c6-4302-8f15-4eb8dfaf5960" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.430908] env[61998]: DEBUG oslo_concurrency.lockutils [req-0c06ae6e-cfde-4869-bef7-5065529ac0e6 req-a11c1de3-4c92-4b3b-9ad4-2b99e9e2c141 service nova] Acquired lock "refresh_cache-6236c44a-e3c6-4302-8f15-4eb8dfaf5960" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.431181] env[61998]: DEBUG nova.network.neutron [req-0c06ae6e-cfde-4869-bef7-5065529ac0e6 req-a11c1de3-4c92-4b3b-9ad4-2b99e9e2c141 service nova] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Refreshing network info cache for port 3fd67133-0c2c-4c5e-88a3-769cf0a6176e {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 606.491607] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388431, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505096} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.492574] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] a8f6254f-b867-4967-b4fa-bb70f471f89d/a8f6254f-b867-4967-b4fa-bb70f471f89d.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 606.492574] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 606.492574] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3939de4-f579-4bcd-9daf-3e7cf0d78a57 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.504529] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 606.504529] env[61998]: value = "task-1388432" [ 606.504529] env[61998]: _type = "Task" [ 606.504529] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.518733] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388432, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.577933] env[61998]: ERROR nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3fd67133-0c2c-4c5e-88a3-769cf0a6176e, please check neutron logs for more information. [ 606.577933] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 606.577933] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 606.577933] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 606.577933] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 606.577933] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 606.577933] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 606.577933] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 606.577933] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.577933] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 606.577933] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.577933] env[61998]: ERROR nova.compute.manager raise self.value [ 606.577933] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 606.577933] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 606.577933] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.577933] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 606.578616] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.578616] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 606.578616] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3fd67133-0c2c-4c5e-88a3-769cf0a6176e, please check neutron logs for more information. 
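The traceback above passes twice through oslo_utils.excutils.save_and_reraise_exception() (the excutils.py:227 __exit__ and excutils.py:200 force_reraise frames): cleanup code runs inside the context manager and the saved exception is re-raised on exit, which is why "raise self.value" appears in every dump. A minimal, runnable usage sketch, assuming only that oslo.utils is installed; the cleanup body is illustrative:

    from oslo_utils import excutils

    def demo():
        try:
            raise ValueError("binding failed")   # stand-in failure
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup happens here; when the block exits, the saved
                # ValueError (with its original traceback) is re-raised.
                print("rolling back partially created ports")

    try:
        demo()
    except ValueError as exc:
        print("surfaced:", exc)   # -> surfaced: binding failed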
[ 606.578616] env[61998]: ERROR nova.compute.manager [ 606.578616] env[61998]: Traceback (most recent call last): [ 606.578616] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 606.578616] env[61998]: listener.cb(fileno) [ 606.578616] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 606.578616] env[61998]: result = function(*args, **kwargs) [ 606.578616] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 606.578616] env[61998]: return func(*args, **kwargs) [ 606.578616] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 606.578616] env[61998]: raise e [ 606.578616] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 606.578616] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 606.578616] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 606.578616] env[61998]: created_port_ids = self._update_ports_for_instance( [ 606.578616] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 606.578616] env[61998]: with excutils.save_and_reraise_exception(): [ 606.578616] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.578616] env[61998]: self.force_reraise() [ 606.578616] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.578616] env[61998]: raise self.value [ 606.578616] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 606.578616] env[61998]: updated_port = self._update_port( [ 606.578616] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.578616] env[61998]: _ensure_no_port_binding_failure(port) [ 606.578616] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.578616] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 606.579485] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 3fd67133-0c2c-4c5e-88a3-769cf0a6176e, please check neutron logs for more information. [ 606.579485] env[61998]: Removing descriptor: 17 [ 606.579485] env[61998]: ERROR nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3fd67133-0c2c-4c5e-88a3-769cf0a6176e, please check neutron logs for more information. 
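The bare traceback above begins in eventlet's hub (eventlet/hubs/poll.py, greenthread.py) rather than in Nova because port allocation runs in a separate greenthread; the exception is stored there and only re-raised once the spawn path iterates network_info and calls wait() (the model.py _sync_wrapper/wait frames in the instance traceback below, where Nova's async network-info wrapper is involved). A simplified, hypothetical sketch of that deferral using plain eventlet, not Nova's actual wrapper:

    import eventlet

    def allocate_ports():
        # Stand-in for _allocate_network_async; fails like PortBindingFailed.
        raise RuntimeError("Binding failed for port 3fd67133")

    gt = eventlet.spawn(allocate_ports)   # the build continues while this runs

    try:
        network_info = gt.wait()          # re-raises the greenthread's exception
    except RuntimeError as exc:
        print("spawn aborted:", exc)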
[ 606.579485] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Traceback (most recent call last): [ 606.579485] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 606.579485] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] yield resources [ 606.579485] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 606.579485] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] self.driver.spawn(context, instance, image_meta, [ 606.579485] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 606.579485] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] self._vmops.spawn(context, instance, image_meta, injected_files, [ 606.579485] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 606.579485] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] vm_ref = self.build_virtual_machine(instance, [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] vif_infos = vmwarevif.get_vif_info(self._session, [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] for vif in network_info: [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] return self._sync_wrapper(fn, *args, **kwargs) [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] self.wait() [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] self[:] = self._gt.wait() [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] return self._exit_event.wait() [ 606.579909] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 606.580272] env[61998]: ERROR 
nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] result = hub.switch() [ 606.580272] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 606.580272] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] return self.greenlet.switch() [ 606.580272] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 606.580272] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] result = function(*args, **kwargs) [ 606.580272] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 606.580272] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] return func(*args, **kwargs) [ 606.580272] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 606.580272] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] raise e [ 606.580272] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 606.580272] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] nwinfo = self.network_api.allocate_for_instance( [ 606.580272] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 606.580272] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] created_port_ids = self._update_ports_for_instance( [ 606.580605] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 606.580605] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] with excutils.save_and_reraise_exception(): [ 606.580605] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.580605] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] self.force_reraise() [ 606.580605] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.580605] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] raise self.value [ 606.580605] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 606.580605] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] updated_port = self._update_port( [ 606.580605] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.580605] 
env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] _ensure_no_port_binding_failure(port) [ 606.580605] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.580605] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] raise exception.PortBindingFailed(port_id=port['id']) [ 606.580905] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] nova.exception.PortBindingFailed: Binding failed for port 3fd67133-0c2c-4c5e-88a3-769cf0a6176e, please check neutron logs for more information. [ 606.580905] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] [ 606.580905] env[61998]: INFO nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Terminating instance [ 606.583469] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquiring lock "refresh_cache-6236c44a-e3c6-4302-8f15-4eb8dfaf5960" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.634548] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Releasing lock "refresh_cache-ef129347-9ea0-4615-b897-f51e664da1a7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.634779] env[61998]: DEBUG nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 606.634956] env[61998]: DEBUG nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 606.635139] env[61998]: DEBUG nova.network.neutron [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 606.652731] env[61998]: DEBUG nova.network.neutron [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.914291] env[61998]: DEBUG nova.network.neutron [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.939290] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27c7c49-54cd-4080-8bca-1873538ff167 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.948334] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7f0b4d-f8f4-4bc2-9626-6ca9b6e5fd54 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.980455] env[61998]: DEBUG nova.network.neutron [req-0c06ae6e-cfde-4869-bef7-5065529ac0e6 req-a11c1de3-4c92-4b3b-9ad4-2b99e9e2c141 service nova] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.982751] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8545d3-d12a-42ad-8435-9b34b23beb65 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.990896] env[61998]: DEBUG nova.network.neutron [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.993025] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9f1ac2-fbff-442c-b604-6455f8400d5f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.007771] env[61998]: DEBUG nova.compute.provider_tree [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.018829] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388432, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067683} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.018829] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 607.020064] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c61bf35-a525-4307-ba9b-2bfec4c55e0a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.039956] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] a8f6254f-b867-4967-b4fa-bb70f471f89d/a8f6254f-b867-4967-b4fa-bb70f471f89d.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 607.042626] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8962a303-fc11-4fd6-8b10-cca075bed743 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.064091] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 607.064091] env[61998]: value = "task-1388433" [ 607.064091] env[61998]: _type = "Task" [ 607.064091] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.073186] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388433, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.096022] env[61998]: DEBUG nova.network.neutron [req-0c06ae6e-cfde-4869-bef7-5065529ac0e6 req-a11c1de3-4c92-4b3b-9ad4-2b99e9e2c141 service nova] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.155880] env[61998]: DEBUG nova.network.neutron [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.498501] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Releasing lock "refresh_cache-e4ada227-b79a-457a-b063-dde99840aa14" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.498830] env[61998]: DEBUG nova.compute.manager [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 607.499050] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 607.499926] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f371f2d5-73b3-4643-9d13-d9a8d4495ce6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.508315] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 607.508617] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d51d07e1-fab4-4af3-95b8-8b98f0d32363 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.513181] env[61998]: DEBUG nova.scheduler.client.report [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 607.517830] env[61998]: DEBUG oslo_vmware.api [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 607.517830] env[61998]: value = "task-1388434" [ 607.517830] env[61998]: _type = "Task" [ 607.517830] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.529782] env[61998]: DEBUG oslo_vmware.api [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388434, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.573882] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388433, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.598833] env[61998]: DEBUG oslo_concurrency.lockutils [req-0c06ae6e-cfde-4869-bef7-5065529ac0e6 req-a11c1de3-4c92-4b3b-9ad4-2b99e9e2c141 service nova] Releasing lock "refresh_cache-6236c44a-e3c6-4302-8f15-4eb8dfaf5960" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.599353] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquired lock "refresh_cache-6236c44a-e3c6-4302-8f15-4eb8dfaf5960" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.599576] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 607.658944] env[61998]: INFO nova.compute.manager [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: ef129347-9ea0-4615-b897-f51e664da1a7] Took 1.02 seconds to deallocate network for instance. [ 608.025890] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.542s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.026453] env[61998]: DEBUG nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 608.031769] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.917s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.033793] env[61998]: INFO nova.compute.claims [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 608.037149] env[61998]: DEBUG oslo_vmware.api [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388434, 'name': PowerOffVM_Task, 'duration_secs': 0.147158} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.037809] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 608.037978] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 608.038239] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b617f2f5-b238-4c6d-aefa-8bd79cadb883 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.071820] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 608.073033] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 608.073033] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Deleting the datastore file [datastore2] e4ada227-b79a-457a-b063-dde99840aa14 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 608.073033] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcdec523-e5f8-42f5-b76d-c85bf6de2d61 {{(pid=61998) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.078746] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388433, 'name': ReconfigVM_Task, 'duration_secs': 0.710417} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.079449] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Reconfigured VM instance instance-00000011 to attach disk [datastore1] a8f6254f-b867-4967-b4fa-bb70f471f89d/a8f6254f-b867-4967-b4fa-bb70f471f89d.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 608.080112] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1d99b23-ccd9-4a8e-a29b-4342a0d718cc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.084243] env[61998]: DEBUG oslo_vmware.api [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for the task: (returnval){ [ 608.084243] env[61998]: value = "task-1388436" [ 608.084243] env[61998]: _type = "Task" [ 608.084243] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.088633] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 608.088633] env[61998]: value = "task-1388437" [ 608.088633] env[61998]: _type = "Task" [ 608.088633] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.095871] env[61998]: DEBUG oslo_vmware.api [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388436, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.100331] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388437, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.143527] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.239479] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.460191] env[61998]: DEBUG nova.compute.manager [req-b0872110-fac3-4695-aff5-4039434f3a1a req-abd94a3b-03c7-437b-b37c-fff315449b9e service nova] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Received event network-vif-deleted-3fd67133-0c2c-4c5e-88a3-769cf0a6176e {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 608.539341] env[61998]: DEBUG nova.compute.utils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 608.544031] env[61998]: DEBUG nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 608.544031] env[61998]: DEBUG nova.network.neutron [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 608.581741] env[61998]: DEBUG nova.policy [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '89a976a9671f4ed1a0f768721b1b2d20', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5018bbe6dfa452a92e1b72b0247d82f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 608.595588] env[61998]: DEBUG oslo_vmware.api [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Task: {'id': task-1388436, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182444} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.596179] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 608.596366] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 608.596536] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 608.596701] env[61998]: INFO nova.compute.manager [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Took 1.10 seconds to destroy the instance on the hypervisor. [ 608.596928] env[61998]: DEBUG oslo.service.loopingcall [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 608.597126] env[61998]: DEBUG nova.compute.manager [-] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 608.597220] env[61998]: DEBUG nova.network.neutron [-] [instance: e4ada227-b79a-457a-b063-dde99840aa14] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 608.601367] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388437, 'name': Rename_Task, 'duration_secs': 0.140199} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.601872] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 608.602108] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a17b728a-5b8f-487d-b10e-460bfc273a4e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.608282] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 608.608282] env[61998]: value = "task-1388438" [ 608.608282] env[61998]: _type = "Task" [ 608.608282] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.617321] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388438, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.617997] env[61998]: DEBUG nova.network.neutron [-] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.689183] env[61998]: INFO nova.scheduler.client.report [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Deleted allocations for instance ef129347-9ea0-4615-b897-f51e664da1a7 [ 608.741842] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Releasing lock "refresh_cache-6236c44a-e3c6-4302-8f15-4eb8dfaf5960" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.742369] env[61998]: DEBUG nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 608.742494] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 608.742791] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f9b512f-2baa-4e27-97e5-f52a91542893 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.753417] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7e3d9a-985b-4d60-8e8e-f82d33749427 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.787048] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6236c44a-e3c6-4302-8f15-4eb8dfaf5960 could not be found. [ 608.787048] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 608.787048] env[61998]: INFO nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Took 0.04 seconds to destroy the instance on the hypervisor. [ 608.787048] env[61998]: DEBUG oslo.service.loopingcall [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 608.787326] env[61998]: DEBUG nova.compute.manager [-] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 608.787326] env[61998]: DEBUG nova.network.neutron [-] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 608.839165] env[61998]: DEBUG nova.network.neutron [-] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.917682] env[61998]: DEBUG nova.network.neutron [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Successfully created port: 349e36b8-9012-4828-a720-1886f9013fa2 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.044665] env[61998]: DEBUG nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 609.119977] env[61998]: DEBUG oslo_vmware.api [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388438, 'name': PowerOnVM_Task, 'duration_secs': 0.443372} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.119977] env[61998]: DEBUG nova.network.neutron [-] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.121306] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 609.123244] env[61998]: DEBUG nova.compute.manager [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 609.124511] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3c92ec-6db7-4a64-9ff6-fbf6892d492f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.203214] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef8b6330-93dd-47cc-9a88-e2bca659a8d3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "ef129347-9ea0-4615-b897-f51e664da1a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 71.216s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.512971] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e073fb-2058-4401-ba28-dbe9bb2b5f36 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.521416] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4bab1e-b0ed-48ea-b2ab-bf25d1cbf627 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.558841] env[61998]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc018ca5-4dd1-47c8-aa29-2c0513139496 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.567595] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a01236f-ed11-4644-ace7-476e877cc70a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.581991] env[61998]: DEBUG nova.compute.provider_tree [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.624664] env[61998]: INFO nova.compute.manager [-] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Took 1.03 seconds to deallocate network for instance. [ 609.647123] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.709463] env[61998]: DEBUG nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 609.814236] env[61998]: DEBUG nova.network.neutron [-] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.883368] env[61998]: DEBUG nova.compute.manager [req-0183bceb-c766-433d-a2c9-d00ce8d44aba req-7ac18fad-7621-4920-aeb7-b2e62fd0c368 service nova] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Received event network-changed-349e36b8-9012-4828-a720-1886f9013fa2 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 609.883368] env[61998]: DEBUG nova.compute.manager [req-0183bceb-c766-433d-a2c9-d00ce8d44aba req-7ac18fad-7621-4920-aeb7-b2e62fd0c368 service nova] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Refreshing instance network info cache due to event network-changed-349e36b8-9012-4828-a720-1886f9013fa2. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 609.883368] env[61998]: DEBUG oslo_concurrency.lockutils [req-0183bceb-c766-433d-a2c9-d00ce8d44aba req-7ac18fad-7621-4920-aeb7-b2e62fd0c368 service nova] Acquiring lock "refresh_cache-71248677-92fb-4f66-b089-2cbbdc808bb7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.883368] env[61998]: DEBUG oslo_concurrency.lockutils [req-0183bceb-c766-433d-a2c9-d00ce8d44aba req-7ac18fad-7621-4920-aeb7-b2e62fd0c368 service nova] Acquired lock "refresh_cache-71248677-92fb-4f66-b089-2cbbdc808bb7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.883368] env[61998]: DEBUG nova.network.neutron [req-0183bceb-c766-433d-a2c9-d00ce8d44aba req-7ac18fad-7621-4920-aeb7-b2e62fd0c368 service nova] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Refreshing network info cache for port 349e36b8-9012-4828-a720-1886f9013fa2 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 610.037518] env[61998]: ERROR nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 349e36b8-9012-4828-a720-1886f9013fa2, please check neutron logs for more information. [ 610.037518] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 610.037518] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 610.037518] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 610.037518] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 610.037518] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 610.037518] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 610.037518] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 610.037518] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.037518] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 610.037518] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.037518] env[61998]: ERROR nova.compute.manager raise self.value [ 610.037518] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 610.037518] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 610.037518] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.037518] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 610.038062] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 610.038062] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 610.038062] env[61998]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 349e36b8-9012-4828-a720-1886f9013fa2, please check neutron logs for more information. [ 610.038062] env[61998]: ERROR nova.compute.manager [ 610.038062] env[61998]: Traceback (most recent call last): [ 610.038062] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 610.038062] env[61998]: listener.cb(fileno) [ 610.038062] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 610.038062] env[61998]: result = function(*args, **kwargs) [ 610.038062] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 610.038062] env[61998]: return func(*args, **kwargs) [ 610.038062] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 610.038062] env[61998]: raise e [ 610.038062] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 610.038062] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 610.038062] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 610.038062] env[61998]: created_port_ids = self._update_ports_for_instance( [ 610.038062] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 610.038062] env[61998]: with excutils.save_and_reraise_exception(): [ 610.038062] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.038062] env[61998]: self.force_reraise() [ 610.038062] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.038062] env[61998]: raise self.value [ 610.038062] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 610.038062] env[61998]: updated_port = self._update_port( [ 610.038062] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.038062] env[61998]: _ensure_no_port_binding_failure(port) [ 610.038062] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 610.038062] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 610.038829] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 349e36b8-9012-4828-a720-1886f9013fa2, please check neutron logs for more information. [ 610.038829] env[61998]: Removing descriptor: 15 [ 610.063193] env[61998]: DEBUG nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 610.085094] env[61998]: DEBUG nova.scheduler.client.report [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 610.091145] env[61998]: DEBUG nova.virt.hardware [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 610.091375] env[61998]: DEBUG nova.virt.hardware [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 610.091529] env[61998]: DEBUG nova.virt.hardware [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 610.091706] env[61998]: DEBUG nova.virt.hardware [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 610.091850] env[61998]: DEBUG nova.virt.hardware [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 610.091994] env[61998]: DEBUG nova.virt.hardware [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 610.092213] env[61998]: DEBUG nova.virt.hardware [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 610.092364] env[61998]: DEBUG nova.virt.hardware [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 610.092526] env[61998]: DEBUG nova.virt.hardware [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 610.092685] env[61998]: DEBUG nova.virt.hardware [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 610.092852] env[61998]: DEBUG nova.virt.hardware [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 610.093712] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5748d624-df74-4573-9af3-292c71d0daee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.106020] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492d3de6-87cd-4c92-b92e-175c2a71c4be {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.109171] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquiring lock "a8f6254f-b867-4967-b4fa-bb70f471f89d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.109390] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lock "a8f6254f-b867-4967-b4fa-bb70f471f89d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.109577] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquiring 
lock "a8f6254f-b867-4967-b4fa-bb70f471f89d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.109755] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lock "a8f6254f-b867-4967-b4fa-bb70f471f89d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.109917] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lock "a8f6254f-b867-4967-b4fa-bb70f471f89d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.112203] env[61998]: INFO nova.compute.manager [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Terminating instance [ 610.113798] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquiring lock "refresh_cache-a8f6254f-b867-4967-b4fa-bb70f471f89d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.113951] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquired lock "refresh_cache-a8f6254f-b867-4967-b4fa-bb70f471f89d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.114125] env[61998]: DEBUG nova.network.neutron [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 610.124354] env[61998]: ERROR nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 349e36b8-9012-4828-a720-1886f9013fa2, please check neutron logs for more information. 
[ 610.124354] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Traceback (most recent call last): [ 610.124354] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 610.124354] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] yield resources [ 610.124354] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 610.124354] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] self.driver.spawn(context, instance, image_meta, [ 610.124354] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 610.124354] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 610.124354] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 610.124354] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] vm_ref = self.build_virtual_machine(instance, [ 610.124354] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 610.124814] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] vif_infos = vmwarevif.get_vif_info(self._session, [ 610.124814] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 610.124814] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] for vif in network_info: [ 610.124814] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 610.124814] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] return self._sync_wrapper(fn, *args, **kwargs) [ 610.124814] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 610.124814] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] self.wait() [ 610.124814] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 610.124814] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] self[:] = self._gt.wait() [ 610.124814] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 610.124814] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] return self._exit_event.wait() [ 610.124814] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 610.124814] env[61998]: ERROR 
nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] current.throw(*self._exc) [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] result = function(*args, **kwargs) [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] return func(*args, **kwargs) [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] raise e [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] nwinfo = self.network_api.allocate_for_instance( [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] created_port_ids = self._update_ports_for_instance( [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] with excutils.save_and_reraise_exception(): [ 610.125251] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.125657] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] self.force_reraise() [ 610.125657] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.125657] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] raise self.value [ 610.125657] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 610.125657] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] updated_port = self._update_port( [ 610.125657] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.125657] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] _ensure_no_port_binding_failure(port) [ 610.125657] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
610.125657] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] raise exception.PortBindingFailed(port_id=port['id']) [ 610.125657] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] nova.exception.PortBindingFailed: Binding failed for port 349e36b8-9012-4828-a720-1886f9013fa2, please check neutron logs for more information. [ 610.125657] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] [ 610.125657] env[61998]: INFO nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Terminating instance [ 610.126335] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Acquiring lock "refresh_cache-71248677-92fb-4f66-b089-2cbbdc808bb7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.133582] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.231163] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.319070] env[61998]: INFO nova.compute.manager [-] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Took 1.53 seconds to deallocate network for instance. [ 610.320182] env[61998]: DEBUG nova.compute.claims [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 610.320182] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.408361] env[61998]: DEBUG nova.network.neutron [req-0183bceb-c766-433d-a2c9-d00ce8d44aba req-7ac18fad-7621-4920-aeb7-b2e62fd0c368 service nova] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 610.539552] env[61998]: DEBUG nova.network.neutron [req-0183bceb-c766-433d-a2c9-d00ce8d44aba req-7ac18fad-7621-4920-aeb7-b2e62fd0c368 service nova] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.597627] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.566s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.598268] env[61998]: DEBUG nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 610.600904] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.484s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.640410] env[61998]: DEBUG nova.network.neutron [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 610.697614] env[61998]: DEBUG nova.network.neutron [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.045237] env[61998]: DEBUG oslo_concurrency.lockutils [req-0183bceb-c766-433d-a2c9-d00ce8d44aba req-7ac18fad-7621-4920-aeb7-b2e62fd0c368 service nova] Releasing lock "refresh_cache-71248677-92fb-4f66-b089-2cbbdc808bb7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.046428] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Acquired lock "refresh_cache-71248677-92fb-4f66-b089-2cbbdc808bb7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.046680] env[61998]: DEBUG nova.network.neutron [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 611.114834] env[61998]: DEBUG nova.compute.utils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 611.120134] env[61998]: DEBUG nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 611.120134] env[61998]: DEBUG nova.network.neutron [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 611.200205] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Releasing lock "refresh_cache-a8f6254f-b867-4967-b4fa-bb70f471f89d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.200631] env[61998]: DEBUG nova.compute.manager [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 611.200822] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 611.202720] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25e023e-37b4-4df4-b86f-286b1ae2784a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.216151] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 611.216421] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65ec762f-4952-4ed0-8272-96f79bee0977 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.223625] env[61998]: DEBUG oslo_vmware.api [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 611.223625] env[61998]: value = "task-1388439" [ 611.223625] env[61998]: _type = "Task" [ 611.223625] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.232820] env[61998]: DEBUG oslo_vmware.api [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388439, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.370757] env[61998]: DEBUG nova.policy [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c37d767029264a69af74a555bdbc5810', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a4a7166951841f88a44cd20122c06a4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 611.551131] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099b6b3e-82d2-4e80-be46-f22c21c43ef8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.562409] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f706cb-1e0a-40e1-93b2-2d08570cb1db {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.598735] env[61998]: DEBUG nova.network.neutron [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.601025] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e6dcdd-0306-4441-9b47-e692427ed177 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.609361] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd9dcd8-3375-4f60-be8e-4ad3598d0122 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.623454] env[61998]: DEBUG nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 611.626267] env[61998]: DEBUG nova.compute.provider_tree [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.694067] env[61998]: DEBUG nova.network.neutron [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.733429] env[61998]: DEBUG oslo_vmware.api [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388439, 'name': PowerOffVM_Task, 'duration_secs': 0.198028} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.733689] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 611.733854] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 611.734106] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c159c996-93c3-45bb-9f3b-113bab93b61b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.739328] env[61998]: DEBUG nova.network.neutron [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Successfully created port: 57786bf5-f441-4e5c-9e58-ace0f626cc7b {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.762540] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 611.762756] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 611.762936] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Deleting the 
datastore file [datastore1] a8f6254f-b867-4967-b4fa-bb70f471f89d {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 611.763216] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-13858a62-f6a3-47b4-b8de-1e6c90642480 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.771387] env[61998]: DEBUG oslo_vmware.api [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for the task: (returnval){ [ 611.771387] env[61998]: value = "task-1388441" [ 611.771387] env[61998]: _type = "Task" [ 611.771387] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.780682] env[61998]: DEBUG oslo_vmware.api [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.099351] env[61998]: DEBUG nova.compute.manager [req-24442da5-5bb5-4376-bb94-34088b69a444 req-c7ea728a-51a1-42ce-988e-bb0f6b3f7c24 service nova] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Received event network-vif-deleted-349e36b8-9012-4828-a720-1886f9013fa2 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 612.140680] env[61998]: DEBUG nova.scheduler.client.report [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 612.196864] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Releasing lock "refresh_cache-71248677-92fb-4f66-b089-2cbbdc808bb7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.197320] env[61998]: DEBUG nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 612.197566] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 612.197875] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2dc1c173-8c20-46d0-8a5a-ea46aac287a6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.210331] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d68e6c-6148-44e1-bc46-817de9d6428d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.240408] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 71248677-92fb-4f66-b089-2cbbdc808bb7 could not be found. [ 612.240408] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 612.240408] env[61998]: INFO nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 612.240408] env[61998]: DEBUG oslo.service.loopingcall [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 612.240408] env[61998]: DEBUG nova.compute.manager [-] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 612.240408] env[61998]: DEBUG nova.network.neutron [-] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 612.257091] env[61998]: DEBUG nova.network.neutron [-] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.283421] env[61998]: DEBUG oslo_vmware.api [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Task: {'id': task-1388441, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128689} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.283705] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 612.283885] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 612.284073] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 612.284361] env[61998]: INFO nova.compute.manager [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Took 1.08 seconds to destroy the instance on the hypervisor. [ 612.284609] env[61998]: DEBUG oslo.service.loopingcall [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 612.284796] env[61998]: DEBUG nova.compute.manager [-] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 612.284890] env[61998]: DEBUG nova.network.neutron [-] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 612.300495] env[61998]: DEBUG nova.network.neutron [-] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.638436] env[61998]: DEBUG nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 612.644785] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.044s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.645439] env[61998]: ERROR nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cebc5609-a26d-4a2c-9232-e2a23c02b6be, please check neutron logs for more information. [ 612.645439] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Traceback (most recent call last): [ 612.645439] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 612.645439] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] self.driver.spawn(context, instance, image_meta, [ 612.645439] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 612.645439] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 612.645439] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 612.645439] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] vm_ref = self.build_virtual_machine(instance, [ 612.645439] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 612.645439] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] vif_infos = vmwarevif.get_vif_info(self._session, [ 612.645439] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 612.645780] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] for vif in network_info: [ 612.645780] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 612.645780] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] return self._sync_wrapper(fn, *args, **kwargs) [ 612.645780] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 612.645780] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] self.wait() [ 612.645780] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 612.645780] env[61998]: ERROR 
nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] self[:] = self._gt.wait() [ 612.645780] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 612.645780] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] return self._exit_event.wait() [ 612.645780] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 612.645780] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] result = hub.switch() [ 612.645780] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 612.645780] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] return self.greenlet.switch() [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] result = function(*args, **kwargs) [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] return func(*args, **kwargs) [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] raise e [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] nwinfo = self.network_api.allocate_for_instance( [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] created_port_ids = self._update_ports_for_instance( [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] with excutils.save_and_reraise_exception(): [ 612.646132] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.646468] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] self.force_reraise() [ 612.646468] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 612.646468] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] raise self.value [ 612.646468] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 612.646468] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] updated_port = self._update_port( [ 612.646468] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.646468] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] _ensure_no_port_binding_failure(port) [ 612.646468] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.646468] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] raise exception.PortBindingFailed(port_id=port['id']) [ 612.646468] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] nova.exception.PortBindingFailed: Binding failed for port cebc5609-a26d-4a2c-9232-e2a23c02b6be, please check neutron logs for more information. [ 612.646468] env[61998]: ERROR nova.compute.manager [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] [ 612.646759] env[61998]: DEBUG nova.compute.utils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Binding failed for port cebc5609-a26d-4a2c-9232-e2a23c02b6be, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 612.649992] env[61998]: DEBUG nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Build of instance 59330fd4-c362-4593-824d-d40c00f3f5d2 was re-scheduled: Binding failed for port cebc5609-a26d-4a2c-9232-e2a23c02b6be, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 612.650435] env[61998]: DEBUG nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 612.650653] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Acquiring lock "refresh_cache-59330fd4-c362-4593-824d-d40c00f3f5d2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.650798] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Acquired lock "refresh_cache-59330fd4-c362-4593-824d-d40c00f3f5d2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.650949] env[61998]: DEBUG nova.network.neutron [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 612.651926] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.740s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.652060] env[61998]: DEBUG nova.objects.instance [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61998) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 612.654735] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "bc1ef57d-457d-446a-8ad4-3bab6d331215" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.654735] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "bc1ef57d-457d-446a-8ad4-3bab6d331215" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.667769] env[61998]: DEBUG nova.virt.hardware [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 
tempest-ImagesOneServerTestJSON-1130644034-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 612.667992] env[61998]: DEBUG nova.virt.hardware [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 612.668156] env[61998]: DEBUG nova.virt.hardware [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.668330] env[61998]: DEBUG nova.virt.hardware [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 612.668600] env[61998]: DEBUG nova.virt.hardware [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.668783] env[61998]: DEBUG nova.virt.hardware [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 612.669078] env[61998]: DEBUG nova.virt.hardware [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 612.669241] env[61998]: DEBUG nova.virt.hardware [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 612.669404] env[61998]: DEBUG nova.virt.hardware [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Got 1 possible topologies {{(pid=61998) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 612.669558] env[61998]: DEBUG nova.virt.hardware [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 612.669718] env[61998]: DEBUG nova.virt.hardware [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 612.670968] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8940a685-596d-41c3-8824-21665c253c62 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.683111] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a3e69e-de72-4833-b525-473dd1a7c74d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.759508] env[61998]: DEBUG nova.network.neutron [-] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.806063] env[61998]: DEBUG nova.network.neutron [-] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.815112] env[61998]: ERROR nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 57786bf5-f441-4e5c-9e58-ace0f626cc7b, please check neutron logs for more information. 
[ 612.815112] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 612.815112] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 612.815112] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 612.815112] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 612.815112] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 612.815112] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 612.815112] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 612.815112] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.815112] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 612.815112] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.815112] env[61998]: ERROR nova.compute.manager raise self.value [ 612.815112] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 612.815112] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 612.815112] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.815112] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 612.815591] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.815591] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 612.815591] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 57786bf5-f441-4e5c-9e58-ace0f626cc7b, please check neutron logs for more information. 
[ 612.815591] env[61998]: ERROR nova.compute.manager [ 612.815591] env[61998]: Traceback (most recent call last): [ 612.815591] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 612.815591] env[61998]: listener.cb(fileno) [ 612.815591] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.815591] env[61998]: result = function(*args, **kwargs) [ 612.815591] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 612.815591] env[61998]: return func(*args, **kwargs) [ 612.815591] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 612.815591] env[61998]: raise e [ 612.815591] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 612.815591] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 612.815591] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 612.815591] env[61998]: created_port_ids = self._update_ports_for_instance( [ 612.815591] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 612.815591] env[61998]: with excutils.save_and_reraise_exception(): [ 612.815591] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.815591] env[61998]: self.force_reraise() [ 612.815591] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.815591] env[61998]: raise self.value [ 612.815591] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 612.815591] env[61998]: updated_port = self._update_port( [ 612.815591] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.815591] env[61998]: _ensure_no_port_binding_failure(port) [ 612.815591] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.815591] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 612.816241] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 57786bf5-f441-4e5c-9e58-ace0f626cc7b, please check neutron logs for more information. [ 612.816241] env[61998]: Removing descriptor: 15 [ 612.816241] env[61998]: ERROR nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 57786bf5-f441-4e5c-9e58-ace0f626cc7b, please check neutron logs for more information. 
[ 612.816241] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Traceback (most recent call last): [ 612.816241] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 612.816241] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] yield resources [ 612.816241] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 612.816241] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] self.driver.spawn(context, instance, image_meta, [ 612.816241] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 612.816241] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] self._vmops.spawn(context, instance, image_meta, injected_files, [ 612.816241] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 612.816241] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] vm_ref = self.build_virtual_machine(instance, [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] vif_infos = vmwarevif.get_vif_info(self._session, [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] for vif in network_info: [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] return self._sync_wrapper(fn, *args, **kwargs) [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] self.wait() [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] self[:] = self._gt.wait() [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] return self._exit_event.wait() [ 612.816513] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 612.816863] env[61998]: ERROR 
nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] result = hub.switch() [ 612.816863] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 612.816863] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] return self.greenlet.switch() [ 612.816863] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.816863] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] result = function(*args, **kwargs) [ 612.816863] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 612.816863] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] return func(*args, **kwargs) [ 612.816863] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 612.816863] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] raise e [ 612.816863] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 612.816863] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] nwinfo = self.network_api.allocate_for_instance( [ 612.816863] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 612.816863] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] created_port_ids = self._update_ports_for_instance( [ 612.817221] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 612.817221] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] with excutils.save_and_reraise_exception(): [ 612.817221] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.817221] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] self.force_reraise() [ 612.817221] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.817221] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] raise self.value [ 612.817221] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 612.817221] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] updated_port = self._update_port( [ 612.817221] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.817221] 
env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] _ensure_no_port_binding_failure(port) [ 612.817221] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.817221] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] raise exception.PortBindingFailed(port_id=port['id']) [ 612.817543] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] nova.exception.PortBindingFailed: Binding failed for port 57786bf5-f441-4e5c-9e58-ace0f626cc7b, please check neutron logs for more information. [ 612.817543] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] [ 612.817543] env[61998]: INFO nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Terminating instance [ 612.818932] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Acquiring lock "refresh_cache-fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.819078] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Acquired lock "refresh_cache-fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.819286] env[61998]: DEBUG nova.network.neutron [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 613.176033] env[61998]: DEBUG nova.network.neutron [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.262190] env[61998]: INFO nova.compute.manager [-] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Took 1.02 seconds to deallocate network for instance. 
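The paired __exit__ / force_reraise frames in both tracebacks above belong to oslo_utils.excutils.save_and_reraise_exception: a context manager entered from inside an except block so that cleanup can run and the original exception is then re-raised (the "raise self.value" frame). A rough self-contained sketch of the pattern, simplified from the real oslo.utils implementation:

import sys

class save_and_reraise_exception:
    """Capture the in-flight exception, let cleanup run, then re-raise it."""

    def __enter__(self):
        # Must be entered from inside an except block.
        self.type_, self.value, self.tb = sys.exc_info()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None:
            return False  # the cleanup itself failed; let that error win
        self.force_reraise()

    def force_reraise(self):
        raise self.value.with_traceback(self.tb)

def update_ports_for_instance():
    try:
        raise RuntimeError('simulated port update failure')
    except RuntimeError:
        with save_and_reraise_exception():
            print('rolling back ports created so far')  # cleanup runs first

try:
    update_ports_for_instance()
except RuntimeError as exc:
    print('re-raised after cleanup:', exc)

This is why _update_ports_for_instance appears twice in each traceback: once at the `with` statement where the cleanup ran, and once at the original failure site being re-raised.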
[ 613.264674] env[61998]: DEBUG nova.compute.claims [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 613.265269] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.275328] env[61998]: DEBUG nova.network.neutron [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.308132] env[61998]: INFO nova.compute.manager [-] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Took 1.02 seconds to deallocate network for instance. [ 613.335498] env[61998]: DEBUG nova.network.neutron [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.427399] env[61998]: DEBUG nova.network.neutron [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.664805] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ecdf2809-5607-40a1-8818-687e45ae59fd tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.666356] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.086s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.778872] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Releasing lock "refresh_cache-59330fd4-c362-4593-824d-d40c00f3f5d2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.779141] env[61998]: DEBUG nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Virt driver does 
not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 613.779328] env[61998]: DEBUG nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 613.779491] env[61998]: DEBUG nova.network.neutron [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 613.793996] env[61998]: DEBUG nova.network.neutron [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.815093] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.930675] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Releasing lock "refresh_cache-fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.931164] env[61998]: DEBUG nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 613.931366] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.932060] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6f3152b-143e-4f5f-8e4e-8f517ec9ae63 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.943691] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9121259-2303-4ab6-ad58-a39a4dc8c3c2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.967720] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57 could not be found. [ 613.968218] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 613.968412] env[61998]: INFO nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Took 0.04 seconds to destroy the instance on the hypervisor. [ 613.968651] env[61998]: DEBUG oslo.service.loopingcall [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 613.968877] env[61998]: DEBUG nova.compute.manager [-] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 613.968971] env[61998]: DEBUG nova.network.neutron [-] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 613.983527] env[61998]: DEBUG nova.network.neutron [-] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.126265] env[61998]: DEBUG nova.compute.manager [req-95245147-3883-4a1d-8563-93e45b7e2560 req-d8a87104-c86b-4a16-aaae-5d4a4d82f8ad service nova] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Received event network-changed-57786bf5-f441-4e5c-9e58-ace0f626cc7b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 614.126265] env[61998]: DEBUG nova.compute.manager [req-95245147-3883-4a1d-8563-93e45b7e2560 req-d8a87104-c86b-4a16-aaae-5d4a4d82f8ad service nova] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Refreshing instance network info cache due to event network-changed-57786bf5-f441-4e5c-9e58-ace0f626cc7b. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 614.126265] env[61998]: DEBUG oslo_concurrency.lockutils [req-95245147-3883-4a1d-8563-93e45b7e2560 req-d8a87104-c86b-4a16-aaae-5d4a4d82f8ad service nova] Acquiring lock "refresh_cache-fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.126265] env[61998]: DEBUG oslo_concurrency.lockutils [req-95245147-3883-4a1d-8563-93e45b7e2560 req-d8a87104-c86b-4a16-aaae-5d4a4d82f8ad service nova] Acquired lock "refresh_cache-fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.126265] env[61998]: DEBUG nova.network.neutron [req-95245147-3883-4a1d-8563-93e45b7e2560 req-d8a87104-c86b-4a16-aaae-5d4a4d82f8ad service nova] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Refreshing network info cache for port 57786bf5-f441-4e5c-9e58-ace0f626cc7b {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 614.296685] env[61998]: DEBUG nova.network.neutron [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.485740] env[61998]: DEBUG nova.network.neutron [-] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.530141] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9fb90e-dc32-4255-a275-1d82b0b51abb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.538222] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b934820a-3bb6-4082-aacf-534122bb4d01 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.569526] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b8c13a-4912-46dd-8bb7-6b9cecb674fa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.577197] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a72e7e-e8c7-4623-b07b-e275aefe7804 {{(pid=61998) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.591794] env[61998]: DEBUG nova.compute.provider_tree [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.643413] env[61998]: DEBUG nova.network.neutron [req-95245147-3883-4a1d-8563-93e45b7e2560 req-d8a87104-c86b-4a16-aaae-5d4a4d82f8ad service nova] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.719033] env[61998]: DEBUG nova.network.neutron [req-95245147-3883-4a1d-8563-93e45b7e2560 req-d8a87104-c86b-4a16-aaae-5d4a4d82f8ad service nova] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.799114] env[61998]: INFO nova.compute.manager [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] [instance: 59330fd4-c362-4593-824d-d40c00f3f5d2] Took 1.02 seconds to deallocate network for instance. [ 614.990147] env[61998]: INFO nova.compute.manager [-] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Took 1.02 seconds to deallocate network for instance. [ 614.992484] env[61998]: DEBUG nova.compute.claims [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 614.992675] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.094414] env[61998]: DEBUG nova.scheduler.client.report [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 615.221041] env[61998]: DEBUG oslo_concurrency.lockutils [req-95245147-3883-4a1d-8563-93e45b7e2560 req-d8a87104-c86b-4a16-aaae-5d4a4d82f8ad service nova] Releasing lock "refresh_cache-fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.221151] env[61998]: DEBUG nova.compute.manager [req-95245147-3883-4a1d-8563-93e45b7e2560 req-d8a87104-c86b-4a16-aaae-5d4a4d82f8ad service nova] [instance: 
fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Received event network-vif-deleted-57786bf5-f441-4e5c-9e58-ace0f626cc7b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 615.599329] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.933s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.600044] env[61998]: ERROR nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2924fd97-aeb1-44e8-9977-63613685db15, please check neutron logs for more information. [ 615.600044] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Traceback (most recent call last): [ 615.600044] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 615.600044] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] self.driver.spawn(context, instance, image_meta, [ 615.600044] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 615.600044] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 615.600044] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 615.600044] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] vm_ref = self.build_virtual_machine(instance, [ 615.600044] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 615.600044] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] vif_infos = vmwarevif.get_vif_info(self._session, [ 615.600044] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] for vif in network_info: [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] return self._sync_wrapper(fn, *args, **kwargs) [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] self.wait() [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File 
"/opt/stack/nova/nova/network/model.py", line 637, in wait [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] self[:] = self._gt.wait() [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] return self._exit_event.wait() [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] result = hub.switch() [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 615.600336] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] return self.greenlet.switch() [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] result = function(*args, **kwargs) [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] return func(*args, **kwargs) [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] raise e [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] nwinfo = self.network_api.allocate_for_instance( [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] created_port_ids = self._update_ports_for_instance( [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] with excutils.save_and_reraise_exception(): [ 615.600618] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 615.600904] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] self.force_reraise() [ 615.600904] env[61998]: ERROR nova.compute.manager [instance: 
df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 615.600904] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] raise self.value [ 615.600904] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 615.600904] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] updated_port = self._update_port( [ 615.600904] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 615.600904] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] _ensure_no_port_binding_failure(port) [ 615.600904] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 615.600904] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] raise exception.PortBindingFailed(port_id=port['id']) [ 615.600904] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] nova.exception.PortBindingFailed: Binding failed for port 2924fd97-aeb1-44e8-9977-63613685db15, please check neutron logs for more information. [ 615.600904] env[61998]: ERROR nova.compute.manager [instance: df154c2a-3616-442d-abb0-83e68cf1141d] [ 615.601261] env[61998]: DEBUG nova.compute.utils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Binding failed for port 2924fd97-aeb1-44e8-9977-63613685db15, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 615.602247] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.410s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.605990] env[61998]: DEBUG nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Build of instance df154c2a-3616-442d-abb0-83e68cf1141d was re-scheduled: Binding failed for port 2924fd97-aeb1-44e8-9977-63613685db15, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 615.606454] env[61998]: DEBUG nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 615.606679] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "refresh_cache-df154c2a-3616-442d-abb0-83e68cf1141d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.606822] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquired lock "refresh_cache-df154c2a-3616-442d-abb0-83e68cf1141d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.607018] env[61998]: DEBUG nova.network.neutron [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 615.832545] env[61998]: INFO nova.scheduler.client.report [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Deleted allocations for instance 59330fd4-c362-4593-824d-d40c00f3f5d2 [ 616.124988] env[61998]: DEBUG nova.network.neutron [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.212466] env[61998]: DEBUG nova.network.neutron [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.344771] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7dc15f4-f88c-471c-a81e-a4a2134fff2a tempest-ServersNegativeTestJSON-2127816395 tempest-ServersNegativeTestJSON-2127816395-project-member] Lock "59330fd4-c362-4593-824d-d40c00f3f5d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 76.091s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.479970] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30f25a7-47c5-41df-b20d-352350d9de75 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.489204] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73fd1a2-0d3e-4293-8e0c-ac7403b1521c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.523276] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90086d8-d2f5-40fa-8475-e9892f3a6bba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.531520] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1618517-b0ce-47f6-9db8-24102e44525f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.545141] env[61998]: DEBUG nova.compute.provider_tree [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.715880] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Releasing lock "refresh_cache-df154c2a-3616-442d-abb0-83e68cf1141d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.715880] env[61998]: DEBUG nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 616.715880] env[61998]: DEBUG nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 616.715880] env[61998]: DEBUG nova.network.neutron [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 616.737789] env[61998]: DEBUG nova.network.neutron [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.847789] env[61998]: DEBUG nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 617.049162] env[61998]: DEBUG nova.scheduler.client.report [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 617.240281] env[61998]: DEBUG nova.network.neutron [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.376246] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.555288] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.952s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.555288] env[61998]: ERROR nova.compute.manager 
[None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c969908d-2baa-444f-ad85-d6e514854266, please check neutron logs for more information. [ 617.555288] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Traceback (most recent call last): [ 617.555288] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 617.555288] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] self.driver.spawn(context, instance, image_meta, [ 617.555288] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 617.555288] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 617.555288] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 617.555288] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] vm_ref = self.build_virtual_machine(instance, [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] vif_infos = vmwarevif.get_vif_info(self._session, [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] for vif in network_info: [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] return self._sync_wrapper(fn, *args, **kwargs) [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] self.wait() [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] self[:] = self._gt.wait() [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] return self._exit_event.wait() [ 617.555978] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] result = hub.switch() [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] return self.greenlet.switch() [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] result = function(*args, **kwargs) [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] return func(*args, **kwargs) [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] raise e [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] nwinfo = self.network_api.allocate_for_instance( [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 617.557442] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] created_port_ids = self._update_ports_for_instance( [ 617.557788] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 617.557788] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] with excutils.save_and_reraise_exception(): [ 617.557788] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.557788] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] self.force_reraise() [ 617.557788] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.557788] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] raise self.value [ 617.557788] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 617.557788] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] updated_port = self._update_port( [ 617.557788] env[61998]: ERROR nova.compute.manager [instance: 
ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.557788] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] _ensure_no_port_binding_failure(port) [ 617.557788] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.557788] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] raise exception.PortBindingFailed(port_id=port['id']) [ 617.558083] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] nova.exception.PortBindingFailed: Binding failed for port c969908d-2baa-444f-ad85-d6e514854266, please check neutron logs for more information. [ 617.558083] env[61998]: ERROR nova.compute.manager [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] [ 617.558083] env[61998]: DEBUG nova.compute.utils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Binding failed for port c969908d-2baa-444f-ad85-d6e514854266, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 617.558083] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.057s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.561020] env[61998]: INFO nova.compute.claims [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 617.562408] env[61998]: DEBUG nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Build of instance ad2f23df-c067-4d30-b143-e50ebcc50d4e was re-scheduled: Binding failed for port c969908d-2baa-444f-ad85-d6e514854266, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 617.563401] env[61998]: DEBUG nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 617.563401] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Acquiring lock "refresh_cache-ad2f23df-c067-4d30-b143-e50ebcc50d4e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.563401] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Acquired lock "refresh_cache-ad2f23df-c067-4d30-b143-e50ebcc50d4e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.563401] env[61998]: DEBUG nova.network.neutron [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.572032] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 617.572424] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 617.747429] env[61998]: INFO nova.compute.manager [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: df154c2a-3616-442d-abb0-83e68cf1141d] Took 1.03 seconds to deallocate network for instance. 
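The "Inventory has not changed ... based on inventory data" records earlier in this trace show how this compute node reports capacity to Placement. Schedulable capacity for each resource class is roughly (total - reserved) * allocation_ratio, with max_unit capping what a single instance may consume from the provider; the sketch below just replays that arithmetic on the values quoted in this log:

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Placement's effective capacity: over-commit applies after the reserve.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print('%s: schedulable=%d, per-instance max_unit=%d'
          % (rc, capacity, inv['max_unit']))

With allocation_ratio 4.0 the 48 physical vCPUs are offered as 192 schedulable VCPU units, while max_unit=16 still limits any single flavor to 16 vCPUs on this node.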
[ 618.077249] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.077418] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Starting heal instance info cache {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10227}} [ 618.077540] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Rebuilding the list of instances to heal {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10231}} [ 618.085496] env[61998]: DEBUG nova.network.neutron [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.174957] env[61998]: DEBUG nova.network.neutron [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.587546] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}} [ 618.587546] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}} [ 618.587734] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}} [ 618.587774] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}} [ 618.588284] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}} [ 618.588284] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Skipping network cache update for instance because it is Building. 
{{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}} [ 618.609829] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "refresh_cache-e4ada227-b79a-457a-b063-dde99840aa14" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.609999] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquired lock "refresh_cache-e4ada227-b79a-457a-b063-dde99840aa14" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.610178] env[61998]: DEBUG nova.network.neutron [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Forcefully refreshing network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 618.610337] env[61998]: DEBUG nova.objects.instance [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lazy-loading 'info_cache' on Instance uuid e4ada227-b79a-457a-b063-dde99840aa14 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 618.679292] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Releasing lock "refresh_cache-ad2f23df-c067-4d30-b143-e50ebcc50d4e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.679519] env[61998]: DEBUG nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 618.679701] env[61998]: DEBUG nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 618.679867] env[61998]: DEBUG nova.network.neutron [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 618.698872] env[61998]: DEBUG nova.network.neutron [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.774749] env[61998]: INFO nova.scheduler.client.report [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Deleted allocations for instance df154c2a-3616-442d-abb0-83e68cf1141d [ 619.024895] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6f599d-775b-4889-9de0-d43124a89d41 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.033470] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f06432a-e6c9-4bb9-a6f3-df324ee62c32 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.064666] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545efff1-2158-427f-b015-889067bfc39a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.072681] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece93526-575b-416f-8710-f9ff3d0d8152 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.087743] env[61998]: DEBUG nova.compute.provider_tree [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.201102] env[61998]: DEBUG nova.network.neutron [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.284085] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78fc5888-dc51-4db6-b042-99c6e37b55e8 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "df154c2a-3616-442d-abb0-83e68cf1141d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.195s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.590547] env[61998]: DEBUG nova.scheduler.client.report [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 619.638133] env[61998]: DEBUG nova.network.neutron [None req-12ad4782-81a7-4677-9306-37d058ade6f6 
None None] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.704936] env[61998]: INFO nova.compute.manager [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] [instance: ad2f23df-c067-4d30-b143-e50ebcc50d4e] Took 1.02 seconds to deallocate network for instance. [ 619.787554] env[61998]: DEBUG nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 620.097355] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.540s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.097973] env[61998]: DEBUG nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 620.101036] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.924s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.202248] env[61998]: DEBUG nova.network.neutron [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.311982] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.546557] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "a733a167-9713-43b7-bcc0-b0af47879ffc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.546784] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "a733a167-9713-43b7-bcc0-b0af47879ffc" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.606044] env[61998]: DEBUG nova.compute.utils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 620.610857] env[61998]: DEBUG nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 620.610857] env[61998]: DEBUG nova.network.neutron [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 620.704841] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Releasing lock "refresh_cache-e4ada227-b79a-457a-b063-dde99840aa14" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.705065] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Updated the network info_cache for instance {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10298}} [ 620.706148] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.706148] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.706148] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.706148] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.706148] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.706415] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61998) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.706415] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61998) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10846}} [ 620.706476] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.735667] env[61998]: INFO nova.scheduler.client.report [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Deleted allocations for instance ad2f23df-c067-4d30-b143-e50ebcc50d4e [ 620.830261] env[61998]: DEBUG nova.policy [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '906c0701d2534ec18c38047adfcedea2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a6b03a12b5b4aeaaff1be7c50269b9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 621.010928] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5f198d-e2e0-4066-a31f-8aa9b94c93a7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.019840] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3e55ac-c866-4859-b3f4-fb4d3ed823e7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.051487] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d50a04-9ede-4980-9b56-8ea6aec77e24 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.059370] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a8ad3e-5274-4693-892f-36b847c8e8cc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.073115] env[61998]: DEBUG nova.compute.provider_tree [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.112451] env[61998]: DEBUG nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 621.183200] env[61998]: DEBUG nova.network.neutron [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Successfully created port: 60bdf1b2-ec40-492d-9a63-9aadcf1aaed4 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.212453] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.246590] env[61998]: DEBUG oslo_concurrency.lockutils [None req-25769ab2-5284-479f-96cd-f9a5dc802ff9 tempest-ServerMetadataNegativeTestJSON-2066213469 tempest-ServerMetadataNegativeTestJSON-2066213469-project-member] Lock "ad2f23df-c067-4d30-b143-e50ebcc50d4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.157s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.576393] env[61998]: DEBUG nova.scheduler.client.report [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 621.748813] env[61998]: DEBUG nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 621.916596] env[61998]: DEBUG nova.compute.manager [req-ce40a9d9-9dbe-4494-864a-1d747c88b588 req-64ef0c5d-30a8-423e-b6a6-966bee0e46de service nova] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Received event network-changed-60bdf1b2-ec40-492d-9a63-9aadcf1aaed4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 621.916596] env[61998]: DEBUG nova.compute.manager [req-ce40a9d9-9dbe-4494-864a-1d747c88b588 req-64ef0c5d-30a8-423e-b6a6-966bee0e46de service nova] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Refreshing instance network info cache due to event network-changed-60bdf1b2-ec40-492d-9a63-9aadcf1aaed4. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 621.916596] env[61998]: DEBUG oslo_concurrency.lockutils [req-ce40a9d9-9dbe-4494-864a-1d747c88b588 req-64ef0c5d-30a8-423e-b6a6-966bee0e46de service nova] Acquiring lock "refresh_cache-b4706725-5e28-4d2a-b4a8-7633ffa63afe" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.916596] env[61998]: DEBUG oslo_concurrency.lockutils [req-ce40a9d9-9dbe-4494-864a-1d747c88b588 req-64ef0c5d-30a8-423e-b6a6-966bee0e46de service nova] Acquired lock "refresh_cache-b4706725-5e28-4d2a-b4a8-7633ffa63afe" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.916596] env[61998]: DEBUG nova.network.neutron [req-ce40a9d9-9dbe-4494-864a-1d747c88b588 req-64ef0c5d-30a8-423e-b6a6-966bee0e46de service nova] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Refreshing network info cache for port 60bdf1b2-ec40-492d-9a63-9aadcf1aaed4 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 622.082217] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.981s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.082751] env[61998]: ERROR nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c, please check neutron logs for more information. 
[ 622.082751] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Traceback (most recent call last): [ 622.082751] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 622.082751] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] self.driver.spawn(context, instance, image_meta, [ 622.082751] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 622.082751] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 622.082751] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 622.082751] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] vm_ref = self.build_virtual_machine(instance, [ 622.082751] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 622.082751] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 622.082751] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] for vif in network_info: [ 622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] return self._sync_wrapper(fn, *args, **kwargs) [ 622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] self.wait() [ 622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] self[:] = self._gt.wait() [ 622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] return self._exit_event.wait() [ 622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] result = hub.switch() [ 622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
622.083515] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] return self.greenlet.switch() [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] result = function(*args, **kwargs) [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] return func(*args, **kwargs) [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] raise e [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] nwinfo = self.network_api.allocate_for_instance( [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] created_port_ids = self._update_ports_for_instance( [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] with excutils.save_and_reraise_exception(): [ 622.083814] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.084117] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] self.force_reraise() [ 622.084117] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.084117] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] raise self.value [ 622.084117] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 622.084117] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] updated_port = self._update_port( [ 622.084117] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.084117] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] _ensure_no_port_binding_failure(port) [ 622.084117] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 622.084117] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] raise exception.PortBindingFailed(port_id=port['id']) [ 622.084117] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] nova.exception.PortBindingFailed: Binding failed for port e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c, please check neutron logs for more information. [ 622.084117] env[61998]: ERROR nova.compute.manager [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] [ 622.084363] env[61998]: DEBUG nova.compute.utils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Binding failed for port e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 622.084796] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.122s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.084868] env[61998]: DEBUG nova.objects.instance [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61998) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 622.091369] env[61998]: DEBUG nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Build of instance e37ac276-8a3e-45b3-8176-d972eb4e6e3e was re-scheduled: Binding failed for port e10d736b-c6c7-4ce2-bbd5-17e6fef1d92c, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 622.091369] env[61998]: DEBUG nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 622.091369] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Acquiring lock "refresh_cache-e37ac276-8a3e-45b3-8176-d972eb4e6e3e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.091369] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Acquired lock "refresh_cache-e37ac276-8a3e-45b3-8176-d972eb4e6e3e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.091551] env[61998]: DEBUG nova.network.neutron [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 622.120829] env[61998]: DEBUG nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 622.123991] env[61998]: ERROR nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 60bdf1b2-ec40-492d-9a63-9aadcf1aaed4, please check neutron logs for more information. 
[ 622.123991] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 622.123991] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 622.123991] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 622.123991] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 622.123991] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 622.123991] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 622.123991] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 622.123991] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.123991] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 622.123991] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.123991] env[61998]: ERROR nova.compute.manager raise self.value [ 622.123991] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 622.123991] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 622.123991] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.123991] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 622.124473] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.124473] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 622.124473] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 60bdf1b2-ec40-492d-9a63-9aadcf1aaed4, please check neutron logs for more information. 
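
The PortBindingFailed above is raised inside a background greenthread: _allocate_network_async runs port creation concurrently with the rest of the build, the raw eventlet traceback that follows is the same exception escaping that greenthread, and it surfaces once more further down ("Instance failed to spawn") when spawn() first iterates the deferred network_info. A minimal sketch of that deferred-wait pattern, with simplified, assumed names (the real implementation is nova.network.model.NetworkInfoAsyncWrapper):

    import eventlet

    class AsyncNetworkInfo:
        """Simplified stand-in for nova.network.model.NetworkInfoAsyncWrapper."""

        def __init__(self, allocate_fn, *args, **kwargs):
            # Allocation starts immediately in a background greenthread.
            self._gt = eventlet.spawn(allocate_fn, *args, **kwargs)
            self._result = None

        def wait(self):
            # The first consumer blocks here; an exception raised in the
            # greenthread (e.g. PortBindingFailed) is re-raised into the
            # caller -- which is why spawn() fails long after allocation did.
            if self._result is None:
                self._result = self._gt.wait()
            return self._result

        def __iter__(self):
            # get_vif_info() iterating network_info is what triggers wait().
            return iter(self.wait())
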
[ 622.124473] env[61998]: ERROR nova.compute.manager [ 622.124473] env[61998]: Traceback (most recent call last): [ 622.124473] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 622.124473] env[61998]: listener.cb(fileno) [ 622.124473] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.124473] env[61998]: result = function(*args, **kwargs) [ 622.124473] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 622.124473] env[61998]: return func(*args, **kwargs) [ 622.124473] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 622.124473] env[61998]: raise e [ 622.124473] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 622.124473] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 622.124473] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 622.124473] env[61998]: created_port_ids = self._update_ports_for_instance( [ 622.124473] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 622.124473] env[61998]: with excutils.save_and_reraise_exception(): [ 622.124473] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.124473] env[61998]: self.force_reraise() [ 622.124473] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.124473] env[61998]: raise self.value [ 622.124473] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 622.124473] env[61998]: updated_port = self._update_port( [ 622.124473] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.124473] env[61998]: _ensure_no_port_binding_failure(port) [ 622.124473] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.124473] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 622.125123] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 60bdf1b2-ec40-492d-9a63-9aadcf1aaed4, please check neutron logs for more information. 
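
The guard at the bottom of these tracebacks is small: after each port update, Nova checks the binding:vif_type that Neutron returned and converts a failed binding into PortBindingFailed, aborting the build. A self-contained sketch of that check (the constant and exception are redefined locally here; the real ones live in nova.network.model and nova.exception):

    # Value Neutron reports in binding:vif_type when binding fails.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, please "
                             "check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Mirrors the guard in nova/network/neutron.py seen in the traceback.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # The port from this log would trip the guard:
    port = {'id': '60bdf1b2-ec40-492d-9a63-9aadcf1aaed4',
            'binding:vif_type': 'binding_failed'}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)
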
[ 622.125123] env[61998]: Removing descriptor: 15 [ 622.150945] env[61998]: DEBUG nova.virt.hardware [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 622.151205] env[61998]: DEBUG nova.virt.hardware [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 622.151357] env[61998]: DEBUG nova.virt.hardware [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 622.151534] env[61998]: DEBUG nova.virt.hardware [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 622.151675] env[61998]: DEBUG nova.virt.hardware [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 622.151897] env[61998]: DEBUG nova.virt.hardware [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 622.152146] env[61998]: DEBUG nova.virt.hardware [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 622.152309] env[61998]: DEBUG nova.virt.hardware [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 622.152476] env[61998]: DEBUG nova.virt.hardware [None 
req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 622.152654] env[61998]: DEBUG nova.virt.hardware [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 622.152830] env[61998]: DEBUG nova.virt.hardware [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 622.153768] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d430b2-d52f-4e93-9c77-5be00791d01c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.162029] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d360832f-5cfb-473e-be0e-26cb9f13f90f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.179108] env[61998]: ERROR nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 60bdf1b2-ec40-492d-9a63-9aadcf1aaed4, please check neutron logs for more information. 
[ 622.179108] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Traceback (most recent call last): [ 622.179108] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 622.179108] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] yield resources [ 622.179108] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 622.179108] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] self.driver.spawn(context, instance, image_meta, [ 622.179108] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 622.179108] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 622.179108] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 622.179108] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] vm_ref = self.build_virtual_machine(instance, [ 622.179108] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 622.179466] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] vif_infos = vmwarevif.get_vif_info(self._session, [ 622.179466] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 622.179466] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] for vif in network_info: [ 622.179466] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 622.179466] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] return self._sync_wrapper(fn, *args, **kwargs) [ 622.179466] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 622.179466] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] self.wait() [ 622.179466] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 622.179466] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] self[:] = self._gt.wait() [ 622.179466] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 622.179466] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] return self._exit_event.wait() [ 622.179466] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 622.179466] env[61998]: ERROR 
nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] current.throw(*self._exc) [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] result = function(*args, **kwargs) [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] return func(*args, **kwargs) [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] raise e [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] nwinfo = self.network_api.allocate_for_instance( [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] created_port_ids = self._update_ports_for_instance( [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] with excutils.save_and_reraise_exception(): [ 622.179765] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.180136] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] self.force_reraise() [ 622.180136] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.180136] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] raise self.value [ 622.180136] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 622.180136] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] updated_port = self._update_port( [ 622.180136] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.180136] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] _ensure_no_port_binding_failure(port) [ 622.180136] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
622.180136] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] raise exception.PortBindingFailed(port_id=port['id']) [ 622.180136] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] nova.exception.PortBindingFailed: Binding failed for port 60bdf1b2-ec40-492d-9a63-9aadcf1aaed4, please check neutron logs for more information. [ 622.180136] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] [ 622.180136] env[61998]: INFO nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Terminating instance [ 622.184556] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Acquiring lock "refresh_cache-b4706725-5e28-4d2a-b4a8-7633ffa63afe" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.280054] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.443362] env[61998]: DEBUG nova.network.neutron [req-ce40a9d9-9dbe-4494-864a-1d747c88b588 req-64ef0c5d-30a8-423e-b6a6-966bee0e46de service nova] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.541763] env[61998]: DEBUG nova.network.neutron [req-ce40a9d9-9dbe-4494-864a-1d747c88b588 req-64ef0c5d-30a8-423e-b6a6-966bee0e46de service nova] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.622878] env[61998]: DEBUG nova.network.neutron [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.672899] env[61998]: DEBUG nova.network.neutron [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.046018] env[61998]: DEBUG oslo_concurrency.lockutils [req-ce40a9d9-9dbe-4494-864a-1d747c88b588 req-64ef0c5d-30a8-423e-b6a6-966bee0e46de service nova] Releasing lock "refresh_cache-b4706725-5e28-4d2a-b4a8-7633ffa63afe" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.046018] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Acquired lock "refresh_cache-b4706725-5e28-4d2a-b4a8-7633ffa63afe" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.046018] env[61998]: DEBUG nova.network.neutron [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 623.096579] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4278d5a9-23aa-4e83-9ab4-08e3284aa17b tempest-ServersAdmin275Test-481212997 tempest-ServersAdmin275Test-481212997-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.097643] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.086s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.176184] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Releasing lock "refresh_cache-e37ac276-8a3e-45b3-8176-d972eb4e6e3e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.176184] env[61998]: DEBUG nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 623.176184] env[61998]: DEBUG nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 623.176184] env[61998]: DEBUG nova.network.neutron [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 623.191407] env[61998]: DEBUG nova.network.neutron [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.564030] env[61998]: DEBUG nova.network.neutron [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.670273] env[61998]: DEBUG nova.network.neutron [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.695056] env[61998]: DEBUG nova.network.neutron [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.938850] env[61998]: DEBUG nova.compute.manager [req-ec037df2-bfce-433e-90fc-1735916b552c req-590a0305-e891-4a9e-9a75-3908467c04ad service nova] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Received event network-vif-deleted-60bdf1b2-ec40-492d-9a63-9aadcf1aaed4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 623.961783] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9933a97-5d00-4de2-8ba6-e4df44b2f89c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.969968] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a314be28-c40b-4555-b2d5-c844a3e77a3a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.002162] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98353f05-f1d0-4e3b-9738-9cbab62b938a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.009522] 
env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd586b1f-b920-4349-b972-723b2d52ce26 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.022583] env[61998]: DEBUG nova.compute.provider_tree [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.172951] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Releasing lock "refresh_cache-b4706725-5e28-4d2a-b4a8-7633ffa63afe" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.173509] env[61998]: DEBUG nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 624.173725] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 624.174015] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a312f802-2ff3-4fd6-ba72-9551257f5497 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.183884] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fbca6d9-d7e9-49c0-932f-0f72abdcea28 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.199609] env[61998]: INFO nova.compute.manager [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] [instance: e37ac276-8a3e-45b3-8176-d972eb4e6e3e] Took 1.02 seconds to deallocate network for instance. [ 624.207159] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b4706725-5e28-4d2a-b4a8-7633ffa63afe could not be found. 
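[Editor's note] The WARNING just above shows the destroy path tolerating a VM that is already gone from the backend: vmops catches InstanceNotFound and proceeds as if the destroy succeeded, which is why the next entries report "Instance destroyed" and move on to network deallocation. A minimal sketch of that tolerance pattern follows; backend, find_vm, and delete_vm are illustrative stand-ins, not Nova's actual API.

    # Sketch only -- stand-in names, not Nova source. Demonstrates the
    # "missing on the backend means already destroyed" tolerance above.
    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy_vm(backend, instance_uuid):
        try:
            vm_ref = backend.find_vm(instance_uuid)  # may raise InstanceNotFound
            backend.delete_vm(vm_ref)
        except InstanceNotFound:
            # The VM is already gone; treat destroy as a success so the
            # compute manager can continue with network and claim cleanup.
            print(f"Instance does not exist on backend: {instance_uuid}")
        print("Instance destroyed")
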
[ 624.207376] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 624.207551] env[61998]: INFO nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Took 0.03 seconds to destroy the instance on the hypervisor. [ 624.207821] env[61998]: DEBUG oslo.service.loopingcall [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 624.208269] env[61998]: DEBUG nova.compute.manager [-] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 624.208391] env[61998]: DEBUG nova.network.neutron [-] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 624.223993] env[61998]: DEBUG nova.network.neutron [-] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.525494] env[61998]: DEBUG nova.scheduler.client.report [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 624.726449] env[61998]: DEBUG nova.network.neutron [-] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.032140] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.934s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.032774] env[61998]: ERROR nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 
bc42264c-bfda-4aab-8e36-54a4ffacdb53, please check neutron logs for more information. [ 625.032774] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Traceback (most recent call last): [ 625.032774] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 625.032774] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] self.driver.spawn(context, instance, image_meta, [ 625.032774] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 625.032774] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 625.032774] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 625.032774] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] vm_ref = self.build_virtual_machine(instance, [ 625.032774] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 625.032774] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] vif_infos = vmwarevif.get_vif_info(self._session, [ 625.032774] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] for vif in network_info: [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] return self._sync_wrapper(fn, *args, **kwargs) [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] self.wait() [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] self[:] = self._gt.wait() [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] return self._exit_event.wait() [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] result = hub.switch() [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 625.033132] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] return self.greenlet.switch() [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] result = function(*args, **kwargs) [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] return func(*args, **kwargs) [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] raise e [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] nwinfo = self.network_api.allocate_for_instance( [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] created_port_ids = self._update_ports_for_instance( [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] with excutils.save_and_reraise_exception(): [ 625.033426] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.033764] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] self.force_reraise() [ 625.033764] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.033764] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] raise self.value [ 625.033764] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 625.033764] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] updated_port = self._update_port( [ 625.033764] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.033764] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] _ensure_no_port_binding_failure(port) [ 625.033764] env[61998]: ERROR nova.compute.manager [instance: 
975b0c65-6f57-4c7c-ae46-b23920a039f7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.033764] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] raise exception.PortBindingFailed(port_id=port['id']) [ 625.033764] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] nova.exception.PortBindingFailed: Binding failed for port bc42264c-bfda-4aab-8e36-54a4ffacdb53, please check neutron logs for more information. [ 625.033764] env[61998]: ERROR nova.compute.manager [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] [ 625.034034] env[61998]: DEBUG nova.compute.utils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Binding failed for port bc42264c-bfda-4aab-8e36-54a4ffacdb53, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 625.034748] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.388s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.034955] env[61998]: DEBUG nova.objects.instance [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61998) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 625.037550] env[61998]: DEBUG nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Build of instance 975b0c65-6f57-4c7c-ae46-b23920a039f7 was re-scheduled: Binding failed for port bc42264c-bfda-4aab-8e36-54a4ffacdb53, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 625.038013] env[61998]: DEBUG nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 625.038261] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquiring lock "refresh_cache-975b0c65-6f57-4c7c-ae46-b23920a039f7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.038405] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquired lock "refresh_cache-975b0c65-6f57-4c7c-ae46-b23920a039f7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.038561] env[61998]: DEBUG nova.network.neutron [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 625.229147] env[61998]: INFO nova.compute.manager [-] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Took 1.02 seconds to deallocate network for instance. [ 625.232017] env[61998]: DEBUG nova.compute.claims [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 625.232017] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.239041] env[61998]: INFO nova.scheduler.client.report [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Deleted allocations for instance e37ac276-8a3e-45b3-8176-d972eb4e6e3e [ 625.560495] env[61998]: DEBUG nova.network.neutron [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.638670] env[61998]: DEBUG nova.network.neutron [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.750104] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58239374-43de-4bf7-a9d2-26aa7b1b3172 tempest-ServerExternalEventsTest-420887420 tempest-ServerExternalEventsTest-420887420-project-member] Lock "e37ac276-8a3e-45b3-8176-d972eb4e6e3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 79.559s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.048063] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8454e179-1ce4-488f-ad6b-3f915cd4ccdf tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.049322] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.916s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.050659] env[61998]: DEBUG nova.objects.instance [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lazy-loading 'resources' on Instance uuid e4ada227-b79a-457a-b063-dde99840aa14 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 626.141744] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Releasing lock "refresh_cache-975b0c65-6f57-4c7c-ae46-b23920a039f7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.141926] env[61998]: DEBUG nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 626.142133] env[61998]: DEBUG nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 626.142300] env[61998]: DEBUG nova.network.neutron [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 626.158419] env[61998]: DEBUG nova.network.neutron [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.253537] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 626.662104] env[61998]: DEBUG nova.network.neutron [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.775604] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.962382] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98fdd4d-72ca-4c27-a408-03d8e41ce1e0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.970027] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ca3cb7-d97f-440c-97cb-fb2f073ad2d2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.001528] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e45b23b-b4d3-40fd-8bbe-aa5dc18f9ab6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.009652] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679d9a93-e8b5-488a-873c-739ad3a7222b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.022899] env[61998]: DEBUG nova.compute.provider_tree [None 
req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.168169] env[61998]: INFO nova.compute.manager [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 975b0c65-6f57-4c7c-ae46-b23920a039f7] Took 1.03 seconds to deallocate network for instance. [ 627.526652] env[61998]: DEBUG nova.scheduler.client.report [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 628.033609] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.983s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.038173] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.803s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.039836] env[61998]: INFO nova.compute.claims [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.062960] env[61998]: INFO nova.scheduler.client.report [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Deleted allocations for instance e4ada227-b79a-457a-b063-dde99840aa14 [ 628.194382] env[61998]: INFO nova.scheduler.client.report [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Deleted allocations for instance 975b0c65-6f57-4c7c-ae46-b23920a039f7 [ 628.576017] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0374f772-55c4-463c-a211-df84381dc388 tempest-ServersAdmin275Test-1350323747 tempest-ServersAdmin275Test-1350323747-project-member] Lock "e4ada227-b79a-457a-b063-dde99840aa14" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 22.187s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.708502] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b0a2022-54cc-4bc4-b4c1-d371db21e74a tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Lock "975b0c65-6f57-4c7c-ae46-b23920a039f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 81.613s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.211375] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 629.506338] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed107557-a42f-4ba0-8c83-4c760692e5a4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.512933] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a568fb41-5579-4575-90cf-025b1a461b7a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.554199] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0778fe18-df04-44b8-8715-c9ce75397b2a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.561911] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-874979c6-5831-49be-addd-4593b1441eb8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.576170] env[61998]: DEBUG nova.compute.provider_tree [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 629.736160] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.071551] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquiring lock "56e74975-e4fa-4ff8-ab87-aa74125dab78" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.071784] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Lock 
"56e74975-e4fa-4ff8-ab87-aa74125dab78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.080637] env[61998]: DEBUG nova.scheduler.client.report [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 630.585651] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.586242] env[61998]: DEBUG nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 630.588851] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.269s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.994317] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "e632307a-ffe9-45a6-9224-8598aea5d269" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.998196] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "e632307a-ffe9-45a6-9224-8598aea5d269" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.028156] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "b9ec575c-034b-46bc-afbd-7a8a07a8e005" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.028419] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "b9ec575c-034b-46bc-afbd-7a8a07a8e005" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.101093] env[61998]: DEBUG nova.compute.utils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 631.102300] env[61998]: DEBUG nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 631.102485] env[61998]: DEBUG nova.network.neutron [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 631.192259] env[61998]: DEBUG nova.policy [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98e87e38a03745df8fa2fc3f91fffd64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d65c62abaf84b00b01f9a6b3d5df366', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 631.557254] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa5ab9d-b4b7-4aaf-9fc5-5d7bc01b7dcf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.566789] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527970af-1e51-439d-99c2-0e2ab074c58c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.602977] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0aa97f1-1f2f-43f2-b329-d871786a3827 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.603751] env[61998]: DEBUG nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 631.611408] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02e473f-c49e-404f-93bd-283705efec7e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.625746] env[61998]: DEBUG nova.compute.provider_tree [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.903118] env[61998]: DEBUG nova.network.neutron [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Successfully created port: 5a029700-355b-4874-a77b-2768950fce1a {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 632.129945] env[61998]: DEBUG nova.scheduler.client.report [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 632.616937] env[61998]: DEBUG nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 632.635875] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.046s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.635875] env[61998]: ERROR nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3fd67133-0c2c-4c5e-88a3-769cf0a6176e, please check neutron logs for more information. 
[ 632.635875] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Traceback (most recent call last): [ 632.635875] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 632.635875] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] self.driver.spawn(context, instance, image_meta, [ 632.635875] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 632.635875] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] self._vmops.spawn(context, instance, image_meta, injected_files, [ 632.635875] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 632.635875] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] vm_ref = self.build_virtual_machine(instance, [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] vif_infos = vmwarevif.get_vif_info(self._session, [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] for vif in network_info: [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] return self._sync_wrapper(fn, *args, **kwargs) [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] self.wait() [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] self[:] = self._gt.wait() [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] return self._exit_event.wait() [ 632.636273] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] result = hub.switch() [ 632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] return self.greenlet.switch() [ 632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] result = function(*args, **kwargs) [ 632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] return func(*args, **kwargs) [ 632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] raise e [ 632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] nwinfo = self.network_api.allocate_for_instance( [ 632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 632.637244] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] created_port_ids = self._update_ports_for_instance( [ 632.637616] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 632.637616] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] with excutils.save_and_reraise_exception(): [ 632.637616] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.637616] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] self.force_reraise() [ 632.637616] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.637616] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] raise self.value [ 632.637616] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 632.637616] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] updated_port = self._update_port( [ 632.637616] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.637616] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] _ensure_no_port_binding_failure(port) [ 632.637616] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 632.637616] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] raise exception.PortBindingFailed(port_id=port['id']) [ 632.637906] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] nova.exception.PortBindingFailed: Binding failed for port 3fd67133-0c2c-4c5e-88a3-769cf0a6176e, please check neutron logs for more information. [ 632.637906] env[61998]: ERROR nova.compute.manager [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] [ 632.642097] env[61998]: DEBUG nova.compute.utils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Binding failed for port 3fd67133-0c2c-4c5e-88a3-769cf0a6176e, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 632.642097] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.376s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.647949] env[61998]: DEBUG nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Build of instance 6236c44a-e3c6-4302-8f15-4eb8dfaf5960 was re-scheduled: Binding failed for port 3fd67133-0c2c-4c5e-88a3-769cf0a6176e, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 632.647949] env[61998]: DEBUG nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 632.647949] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquiring lock "refresh_cache-6236c44a-e3c6-4302-8f15-4eb8dfaf5960" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.647949] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquired lock "refresh_cache-6236c44a-e3c6-4302-8f15-4eb8dfaf5960" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.648741] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.653576] env[61998]: DEBUG nova.virt.hardware [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 632.654539] env[61998]: DEBUG nova.virt.hardware [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 632.654539] env[61998]: DEBUG nova.virt.hardware [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 632.654736] env[61998]: DEBUG nova.virt.hardware [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 632.654997] env[61998]: DEBUG nova.virt.hardware [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 632.659127] env[61998]: DEBUG nova.virt.hardware [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 632.659127] env[61998]: DEBUG nova.virt.hardware [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 632.659127] env[61998]: DEBUG nova.virt.hardware [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 632.659127] env[61998]: DEBUG nova.virt.hardware [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 632.659127] env[61998]: DEBUG nova.virt.hardware [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 632.659293] env[61998]: DEBUG nova.virt.hardware [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 632.659293] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e1413b-5642-4a23-9ac7-1bd862425b04 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.670026] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a1a2ee-ceee-44b2-8f0e-0e93671d9e8e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.170155] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.284333] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.372285] env[61998]: DEBUG nova.compute.manager [req-c09fa2ab-732b-4a2d-921b-e4c09eaa9195 req-cb43e437-7272-4be6-9adc-cc43aa911c34 service nova] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Received event network-changed-5a029700-355b-4874-a77b-2768950fce1a {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 633.372472] env[61998]: DEBUG nova.compute.manager [req-c09fa2ab-732b-4a2d-921b-e4c09eaa9195 req-cb43e437-7272-4be6-9adc-cc43aa911c34 service nova] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Refreshing instance network info cache due to event network-changed-5a029700-355b-4874-a77b-2768950fce1a. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 633.372679] env[61998]: DEBUG oslo_concurrency.lockutils [req-c09fa2ab-732b-4a2d-921b-e4c09eaa9195 req-cb43e437-7272-4be6-9adc-cc43aa911c34 service nova] Acquiring lock "refresh_cache-70af5d32-254f-4819-8cca-c28346e48139" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.372834] env[61998]: DEBUG oslo_concurrency.lockutils [req-c09fa2ab-732b-4a2d-921b-e4c09eaa9195 req-cb43e437-7272-4be6-9adc-cc43aa911c34 service nova] Acquired lock "refresh_cache-70af5d32-254f-4819-8cca-c28346e48139" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.372962] env[61998]: DEBUG nova.network.neutron [req-c09fa2ab-732b-4a2d-921b-e4c09eaa9195 req-cb43e437-7272-4be6-9adc-cc43aa911c34 service nova] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Refreshing network info cache for port 5a029700-355b-4874-a77b-2768950fce1a {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 633.687095] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26148918-274b-46f9-a8ea-aaebdba7f712 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.694696] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-418066a6-1027-4c41-adff-421dc014e250 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.698699] env[61998]: ERROR nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5a029700-355b-4874-a77b-2768950fce1a, please check neutron logs for more information. 
[ 633.698699] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 633.698699] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 633.698699] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 633.698699] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 633.698699] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 633.698699] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 633.698699] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 633.698699] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.698699] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 633.698699] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.698699] env[61998]: ERROR nova.compute.manager raise self.value [ 633.698699] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 633.698699] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 633.698699] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.698699] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 633.699108] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.699108] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 633.699108] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5a029700-355b-4874-a77b-2768950fce1a, please check neutron logs for more information. 
[ 633.699108] env[61998]: ERROR nova.compute.manager [ 633.699108] env[61998]: Traceback (most recent call last): [ 633.699108] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 633.699108] env[61998]: listener.cb(fileno) [ 633.699108] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.699108] env[61998]: result = function(*args, **kwargs) [ 633.699108] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 633.699108] env[61998]: return func(*args, **kwargs) [ 633.699108] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 633.699108] env[61998]: raise e [ 633.699108] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 633.699108] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 633.699108] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 633.699108] env[61998]: created_port_ids = self._update_ports_for_instance( [ 633.699108] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 633.699108] env[61998]: with excutils.save_and_reraise_exception(): [ 633.699108] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.699108] env[61998]: self.force_reraise() [ 633.699108] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.699108] env[61998]: raise self.value [ 633.699108] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 633.699108] env[61998]: updated_port = self._update_port( [ 633.699108] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.699108] env[61998]: _ensure_no_port_binding_failure(port) [ 633.699108] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.699108] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 633.699831] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 5a029700-355b-4874-a77b-2768950fce1a, please check neutron logs for more information. [ 633.699831] env[61998]: Removing descriptor: 15 [ 633.699831] env[61998]: ERROR nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5a029700-355b-4874-a77b-2768950fce1a, please check neutron logs for more information. 
[ 633.699831] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] Traceback (most recent call last): [ 633.699831] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 633.699831] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] yield resources [ 633.699831] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 633.699831] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] self.driver.spawn(context, instance, image_meta, [ 633.699831] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 633.699831] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] self._vmops.spawn(context, instance, image_meta, injected_files, [ 633.699831] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 633.699831] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] vm_ref = self.build_virtual_machine(instance, [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] vif_infos = vmwarevif.get_vif_info(self._session, [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] for vif in network_info: [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] return self._sync_wrapper(fn, *args, **kwargs) [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] self.wait() [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] self[:] = self._gt.wait() [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] return self._exit_event.wait() [ 633.700140] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 633.700432] env[61998]: ERROR 
nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] result = hub.switch() [ 633.700432] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 633.700432] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] return self.greenlet.switch() [ 633.700432] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.700432] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] result = function(*args, **kwargs) [ 633.700432] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 633.700432] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] return func(*args, **kwargs) [ 633.700432] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 633.700432] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] raise e [ 633.700432] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 633.700432] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] nwinfo = self.network_api.allocate_for_instance( [ 633.700432] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 633.700432] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] created_port_ids = self._update_ports_for_instance( [ 633.700745] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 633.700745] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] with excutils.save_and_reraise_exception(): [ 633.700745] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.700745] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] self.force_reraise() [ 633.700745] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.700745] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] raise self.value [ 633.700745] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 633.700745] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] updated_port = self._update_port( [ 633.700745] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.700745] 
env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] _ensure_no_port_binding_failure(port) [ 633.700745] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.700745] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] raise exception.PortBindingFailed(port_id=port['id']) [ 633.701076] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] nova.exception.PortBindingFailed: Binding failed for port 5a029700-355b-4874-a77b-2768950fce1a, please check neutron logs for more information. [ 633.701076] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] [ 633.701076] env[61998]: INFO nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Terminating instance [ 633.702164] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquiring lock "refresh_cache-70af5d32-254f-4819-8cca-c28346e48139" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.729710] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0700ada4-b9d5-451c-870a-276a7b7f02df {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.736913] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4f3945-4e72-4a08-9951-73df5472bbec {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.750030] env[61998]: DEBUG nova.compute.provider_tree [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 633.790056] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Releasing lock "refresh_cache-6236c44a-e3c6-4302-8f15-4eb8dfaf5960" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.790285] env[61998]: DEBUG nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 633.790739] env[61998]: DEBUG nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 633.790739] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.811957] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.897702] env[61998]: DEBUG nova.network.neutron [req-c09fa2ab-732b-4a2d-921b-e4c09eaa9195 req-cb43e437-7272-4be6-9adc-cc43aa911c34 service nova] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.947884] env[61998]: DEBUG nova.network.neutron [req-c09fa2ab-732b-4a2d-921b-e4c09eaa9195 req-cb43e437-7272-4be6-9adc-cc43aa911c34 service nova] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.253902] env[61998]: DEBUG nova.scheduler.client.report [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 634.317713] env[61998]: DEBUG nova.network.neutron [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.450643] env[61998]: DEBUG oslo_concurrency.lockutils [req-c09fa2ab-732b-4a2d-921b-e4c09eaa9195 req-cb43e437-7272-4be6-9adc-cc43aa911c34 service nova] Releasing lock "refresh_cache-70af5d32-254f-4819-8cca-c28346e48139" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.451264] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquired lock 
"refresh_cache-70af5d32-254f-4819-8cca-c28346e48139" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.451264] env[61998]: DEBUG nova.network.neutron [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.759576] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.119s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.760253] env[61998]: ERROR nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 349e36b8-9012-4828-a720-1886f9013fa2, please check neutron logs for more information. [ 634.760253] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Traceback (most recent call last): [ 634.760253] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 634.760253] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] self.driver.spawn(context, instance, image_meta, [ 634.760253] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 634.760253] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 634.760253] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 634.760253] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] vm_ref = self.build_virtual_machine(instance, [ 634.760253] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 634.760253] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] vif_infos = vmwarevif.get_vif_info(self._session, [ 634.760253] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 634.760544] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] for vif in network_info: [ 634.760544] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 634.760544] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] return self._sync_wrapper(fn, *args, **kwargs) [ 634.760544] 
env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 634.760544] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] self.wait() [ 634.760544] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 634.760544] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] self[:] = self._gt.wait() [ 634.760544] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 634.760544] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] return self._exit_event.wait() [ 634.760544] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 634.760544] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] current.throw(*self._exc) [ 634.760544] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 634.760544] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] result = function(*args, **kwargs) [ 634.760840] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 634.760840] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] return func(*args, **kwargs) [ 634.760840] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 634.760840] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] raise e [ 634.760840] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 634.760840] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] nwinfo = self.network_api.allocate_for_instance( [ 634.760840] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 634.760840] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] created_port_ids = self._update_ports_for_instance( [ 634.760840] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 634.760840] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] with excutils.save_and_reraise_exception(): [ 634.760840] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 634.760840] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] self.force_reraise() [ 634.760840] env[61998]: ERROR 
nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 634.761150] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] raise self.value [ 634.761150] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 634.761150] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] updated_port = self._update_port( [ 634.761150] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 634.761150] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] _ensure_no_port_binding_failure(port) [ 634.761150] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 634.761150] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] raise exception.PortBindingFailed(port_id=port['id']) [ 634.761150] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] nova.exception.PortBindingFailed: Binding failed for port 349e36b8-9012-4828-a720-1886f9013fa2, please check neutron logs for more information. [ 634.761150] env[61998]: ERROR nova.compute.manager [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] [ 634.761150] env[61998]: DEBUG nova.compute.utils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Binding failed for port 349e36b8-9012-4828-a720-1886f9013fa2, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 634.764485] env[61998]: DEBUG nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Build of instance 71248677-92fb-4f66-b089-2cbbdc808bb7 was re-scheduled: Binding failed for port 349e36b8-9012-4828-a720-1886f9013fa2, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 634.764885] env[61998]: DEBUG nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 634.765113] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Acquiring lock "refresh_cache-71248677-92fb-4f66-b089-2cbbdc808bb7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.765264] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Acquired lock "refresh_cache-71248677-92fb-4f66-b089-2cbbdc808bb7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.765421] env[61998]: DEBUG nova.network.neutron [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.766351] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.951s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.766570] env[61998]: DEBUG nova.objects.instance [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lazy-loading 'resources' on Instance uuid a8f6254f-b867-4967-b4fa-bb70f471f89d {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 634.767775] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Acquiring lock "e49e77d6-8d5e-4d89-b129-ac34cd1969c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.767977] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Lock "e49e77d6-8d5e-4d89-b129-ac34cd1969c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.820826] env[61998]: INFO nova.compute.manager [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance:
6236c44a-e3c6-4302-8f15-4eb8dfaf5960] Took 1.03 seconds to deallocate network for instance. [ 634.970462] env[61998]: DEBUG nova.network.neutron [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.035543] env[61998]: DEBUG nova.network.neutron [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.286031] env[61998]: DEBUG nova.network.neutron [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.388920] env[61998]: DEBUG nova.network.neutron [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.436602] env[61998]: DEBUG nova.compute.manager [req-8a6ac1a1-6244-4239-b785-718d8e547ba3 req-15eab628-4b94-40f1-a561-0b97e12cfa86 service nova] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Received event network-vif-deleted-5a029700-355b-4874-a77b-2768950fce1a {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 635.537513] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Releasing lock "refresh_cache-70af5d32-254f-4819-8cca-c28346e48139" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.537937] env[61998]: DEBUG nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 635.538204] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 635.538500] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c0b3322-9b1a-48fb-8da5-746342be1eb4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.548134] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2227916d-8574-45dc-a580-03666b52c21f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.572590] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 70af5d32-254f-4819-8cca-c28346e48139 could not be found. [ 635.572810] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 635.572988] env[61998]: INFO nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Took 0.03 seconds to destroy the instance on the hypervisor. [ 635.573271] env[61998]: DEBUG oslo.service.loopingcall [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 635.575479] env[61998]: DEBUG nova.compute.manager [-] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 635.575583] env[61998]: DEBUG nova.network.neutron [-] [instance: 70af5d32-254f-4819-8cca-c28346e48139] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 635.590466] env[61998]: DEBUG nova.network.neutron [-] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Instance cache missing network info.
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.656643] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67e9afe-6bdb-4fe4-966c-8e6ebd8cb290 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.664729] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b890d95c-1d41-425a-9861-fdca74735f0d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.695167] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf10d9e-057d-4da3-99d9-3d5c95900913 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.702991] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4bfe0b2-21a0-4df6-8958-5b36f9519c20 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.715792] env[61998]: DEBUG nova.compute.provider_tree [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 635.851665] env[61998]: INFO nova.scheduler.client.report [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Deleted allocations for instance 6236c44a-e3c6-4302-8f15-4eb8dfaf5960 [ 635.892345] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Releasing lock "refresh_cache-71248677-92fb-4f66-b089-2cbbdc808bb7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.892588] env[61998]: DEBUG nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 635.892765] env[61998]: DEBUG nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 635.892925] env[61998]: DEBUG nova.network.neutron [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 635.913653] env[61998]: DEBUG nova.network.neutron [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 636.092521] env[61998]: DEBUG nova.network.neutron [-] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.219141] env[61998]: DEBUG nova.scheduler.client.report [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 636.362021] env[61998]: DEBUG oslo_concurrency.lockutils [None req-040aa671-7a16-4f90-9b08-ccd686b6da39 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Lock "6236c44a-e3c6-4302-8f15-4eb8dfaf5960" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.099s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.416317] env[61998]: DEBUG nova.network.neutron [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.597109] env[61998]: INFO nova.compute.manager [-] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Took 1.02 seconds to deallocate network for instance. 
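Annotation: the records above trace the PortBindingFailed cleanup path for instance 70af5d32-254f-4819-8cca-c28346e48139: Neutron reported the port as unbindable, Nova terminated the never-spawned instance (the InstanceNotFound warning from the VMware backend is expected, since no VM was ever created), deallocated the network, and is about to abort the resource claim. Below is a minimal Python sketch of the check that raises in the tracebacks at nova/network/neutron.py:294; it is paraphrased from the traceback and general Nova/os-vif conventions, not copied from Nova source, and the exception class here is a simplified stand-in.

class PortBindingFailed(Exception):
    # Simplified stand-in for nova.exception.PortBindingFailed.
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron "
            "logs for more information.")

def _ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind with the sentinel vif_type
    # 'binding_failed'; raising here aborts the build instead of spawning
    # a VM whose NIC would never work, which is what the tracebacks
    # above record.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

The compute manager catches this per build attempt, cleans up (deallocate network, abort the claim), and re-schedules, which is why each such failure in this log is followed by a "Build of instance ... was re-scheduled" record.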
[ 636.598109] env[61998]: DEBUG nova.compute.claims [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 636.598935] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.725213] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.959s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.728609] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.735s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.761079] env[61998]: INFO nova.scheduler.client.report [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Deleted allocations for instance a8f6254f-b867-4967-b4fa-bb70f471f89d [ 636.863738] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 636.919015] env[61998]: INFO nova.compute.manager [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] [instance: 71248677-92fb-4f66-b089-2cbbdc808bb7] Took 1.03 seconds to deallocate network for instance. 
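Annotation: the "Inventory has not changed for provider ... based on inventory data" records repeat the same resource-provider inventory throughout this run. The capacity that placement schedules against is (total - reserved) * allocation_ratio per resource class; a quick check of the logged numbers (plain arithmetic, not Nova code):

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    effective = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, effective)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So this provider can hold claims for up to 192 vCPUs (48 physical at 4.0 overcommit) but only about 191 GiB of RAM and 400 GB of disk, with max_unit capping any single instance at 16 vCPUs, 65530 MB of memory, and 175 GB of disk.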
[ 637.276183] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f3a30a7-95b5-4641-afc3-240124638660 tempest-ServerShowV257Test-44528303 tempest-ServerShowV257Test-44528303-project-member] Lock "a8f6254f-b867-4967-b4fa-bb70f471f89d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 27.165s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.383180] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.650162] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0900af6-96ba-490b-a86d-a306d0c73d5f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.658294] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d378cfaf-b41d-4646-94d4-5a57361e55a8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.689333] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b30d62d-8a66-4497-bf66-81e1819e2f2c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.697203] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f559dad9-b540-419d-aa59-91f670c3417c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.712243] env[61998]: DEBUG nova.compute.provider_tree [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.952017] env[61998]: INFO nova.scheduler.client.report [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083 tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Deleted allocations for instance 71248677-92fb-4f66-b089-2cbbdc808bb7 [ 638.215546] env[61998]: DEBUG nova.scheduler.client.report [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 638.461607] env[61998]: DEBUG oslo_concurrency.lockutils [None req-766a3c1e-4e5e-4b67-84f8-1daf06ef8c9b tempest-InstanceActionsNegativeTestJSON-819169083
tempest-InstanceActionsNegativeTestJSON-819169083-project-member] Lock "71248677-92fb-4f66-b089-2cbbdc808bb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 85.891s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.724023] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.993s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.724023] env[61998]: ERROR nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 57786bf5-f441-4e5c-9e58-ace0f626cc7b, please check neutron logs for more information. [ 638.724023] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Traceback (most recent call last): [ 638.724023] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 638.724023] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] self.driver.spawn(context, instance, image_meta, [ 638.724023] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 638.724023] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] self._vmops.spawn(context, instance, image_meta, injected_files, [ 638.724023] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 638.724023] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] vm_ref = self.build_virtual_machine(instance, [ 638.724578] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 638.724578] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] vif_infos = vmwarevif.get_vif_info(self._session, [ 638.724578] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 638.724578] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] for vif in network_info: [ 638.724578] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 638.724578] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] return self._sync_wrapper(fn, *args, **kwargs) [ 638.724578] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 638.724578] env[61998]: ERROR nova.compute.manager [instance:
fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] self.wait() [ 638.724578] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 638.724578] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] self[:] = self._gt.wait() [ 638.724578] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 638.724578] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] return self._exit_event.wait() [ 638.724578] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] result = hub.switch() [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] return self.greenlet.switch() [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] result = function(*args, **kwargs) [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] return func(*args, **kwargs) [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] raise e [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] nwinfo = self.network_api.allocate_for_instance( [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 638.724905] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] created_port_ids = self._update_ports_for_instance( [ 638.725244] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 638.725244] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] with excutils.save_and_reraise_exception(): [ 638.725244] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 638.725244] env[61998]: ERROR nova.compute.manager [instance: 
fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] self.force_reraise() [ 638.725244] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 638.725244] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] raise self.value [ 638.725244] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 638.725244] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] updated_port = self._update_port( [ 638.725244] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 638.725244] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] _ensure_no_port_binding_failure(port) [ 638.725244] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 638.725244] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] raise exception.PortBindingFailed(port_id=port['id']) [ 638.725598] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] nova.exception.PortBindingFailed: Binding failed for port 57786bf5-f441-4e5c-9e58-ace0f626cc7b, please check neutron logs for more information. [ 638.725598] env[61998]: ERROR nova.compute.manager [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] [ 638.725598] env[61998]: DEBUG nova.compute.utils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Binding failed for port 57786bf5-f441-4e5c-9e58-ace0f626cc7b, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 638.725598] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.348s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.726722] env[61998]: INFO nova.compute.claims [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 638.732829] env[61998]: DEBUG nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Build of instance fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57 was re-scheduled: Binding failed for port 57786bf5-f441-4e5c-9e58-ace0f626cc7b, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 638.733295] env[61998]: DEBUG nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 638.733530] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Acquiring lock "refresh_cache-fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.733662] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Acquired lock "refresh_cache-fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.733815] env[61998]: DEBUG nova.network.neutron [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 638.744336] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquiring lock "3f2ed06f-27d0-4a73-a678-430db5334147" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.744557] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Lock "3f2ed06f-27d0-4a73-a678-430db5334147" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.773240] env[61998]: DEBUG nova.network.neutron [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 638.888640] env[61998]: DEBUG nova.network.neutron [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.963607] env[61998]: DEBUG nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 639.391117] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Releasing lock "refresh_cache-fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.391360] env[61998]: DEBUG nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 639.391545] env[61998]: DEBUG nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 639.391708] env[61998]: DEBUG nova.network.neutron [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 639.408879] env[61998]: DEBUG nova.network.neutron [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Instance cache missing network info.
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.488626] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.913099] env[61998]: DEBUG nova.network.neutron [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.142256] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6010a00-4ec6-491e-8e80-03cc412531e3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.151218] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a277aa90-1679-4c6f-8eaa-b9e10ffe4f13 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.184902] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa05a435-eb5e-4d45-bb0b-1373c9dd1bdd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.193375] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7103a659-fc8b-427b-a6cd-5a486a5145a2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.206926] env[61998]: DEBUG nova.compute.provider_tree [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.416021] env[61998]: INFO nova.compute.manager [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] [instance: fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57] Took 1.02 seconds to deallocate network for instance. 
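Every traceback in this section bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294). Below is a minimal sketch of that style of check, with a simplified stand-in exception rather than Nova's actual classes: Neutron reports the bound VIF type on each port, and the sentinel value 'binding_failed' means no mechanism driver could bind the port, so the build has to stop, which is what triggers the re-schedule of instance fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57 logged above.

# Simplified stand-in for the check the tracebacks above keep hitting;
# illustrative only, not Nova's actual code.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron returns the bound VIF type on the port; 'binding_failed'
    # is the sentinel meaning no mechanism driver could bind it.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Port id taken from the log above; the dict shape is illustrative.
try:
    ensure_no_port_binding_failure(
        {'id': '57786bf5-f441-4e5c-9e58-ace0f626cc7b',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)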
[ 640.710884] env[61998]: DEBUG nova.scheduler.client.report [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 641.217682] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.218253] env[61998]: DEBUG nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 641.220892] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.909s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.222261] env[61998]: INFO nova.compute.claims [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 641.450142] env[61998]: INFO nova.scheduler.client.report [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Deleted allocations for instance fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57 [ 641.728812] env[61998]: DEBUG nova.compute.utils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 641.731156] env[61998]: DEBUG nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 641.731341] env[61998]: DEBUG nova.network.neutron [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 641.800827] env[61998]: DEBUG nova.policy [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f69fb51cac434f25ad53ba9bfe8d480f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7146aa9cae39448ca92f4f966e1d4daf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 641.961214] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b49348a6-2479-402e-aa1d-c614614db13e tempest-ImagesOneServerTestJSON-1130644034 tempest-ImagesOneServerTestJSON-1130644034-project-member] Lock "fc1f08e8-fd0a-4b84-bc8f-fe3a27136c57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.323s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.101301] env[61998]: DEBUG nova.network.neutron [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Successfully created port: af39bae6-3885-49eb-ba2c-1a564b5a9f59 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.231461] env[61998]: DEBUG nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 642.464629] env[61998]: DEBUG nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 642.635163] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba7949f-b83c-4ce7-b0d8-623dd02005cc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.642990] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9a3166-1d8d-417b-b332-9cca04da11e0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.673927] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb0e67b-b8d9-418f-9b0e-acd2b151a5c1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.681222] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b33ce2f-1c19-46ae-868a-c654c8d7977b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.694256] env[61998]: DEBUG nova.compute.provider_tree [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.953570] env[61998]: DEBUG nova.compute.manager [req-d9c5d4b9-15bc-48d4-8fca-c8b1721c409c req-0953a3c8-0fbe-487f-962d-d5a5e1bd1efd service nova] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Received event network-changed-af39bae6-3885-49eb-ba2c-1a564b5a9f59 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 642.953771] env[61998]: DEBUG nova.compute.manager [req-d9c5d4b9-15bc-48d4-8fca-c8b1721c409c req-0953a3c8-0fbe-487f-962d-d5a5e1bd1efd service nova] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Refreshing instance network info cache due to event network-changed-af39bae6-3885-49eb-ba2c-1a564b5a9f59. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 642.953973] env[61998]: DEBUG oslo_concurrency.lockutils [req-d9c5d4b9-15bc-48d4-8fca-c8b1721c409c req-0953a3c8-0fbe-487f-962d-d5a5e1bd1efd service nova] Acquiring lock "refresh_cache-8672c282-1a66-49b5-9c22-7136b567a52c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.954129] env[61998]: DEBUG oslo_concurrency.lockutils [req-d9c5d4b9-15bc-48d4-8fca-c8b1721c409c req-0953a3c8-0fbe-487f-962d-d5a5e1bd1efd service nova] Acquired lock "refresh_cache-8672c282-1a66-49b5-9c22-7136b567a52c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.954286] env[61998]: DEBUG nova.network.neutron [req-d9c5d4b9-15bc-48d4-8fca-c8b1721c409c req-0953a3c8-0fbe-487f-962d-d5a5e1bd1efd service nova] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Refreshing network info cache for port af39bae6-3885-49eb-ba2c-1a564b5a9f59 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 642.993363] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.169169] env[61998]: ERROR nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port af39bae6-3885-49eb-ba2c-1a564b5a9f59, please check neutron logs for more information. 
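The Acquiring/Acquired/"released" lines throughout these records come from oslo.concurrency's named-lock helper: one process-wide lock object per name, so the 88.323s wait logged above is simply one request queueing behind another holder of the same name. A rough sketch of the pattern, assuming a plain threading.Lock per name (a simplification; the real lockutils also offers fair locks, semaphores, and file locks):

import threading
from contextlib import contextmanager

_locks = {}                        # one lock object per name, process-wide
_registry_guard = threading.Lock()

@contextmanager
def lock(name):
    with _registry_guard:          # creating the named lock must be atomic
        inner = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}"')
    with inner:
        print(f'Lock "{name}" acquired')
        yield                      # critical section runs while held
    print(f'Lock "{name}" released')

# Names mirror the log: a per-instance cache lock and the shared claim lock.
with lock("refresh_cache-8672c282-1a66-49b5-9c22-7136b567a52c"):
    pass                           # rebuild the instance_info_cache here
with lock("compute_resources"):
    pass                           # claim resources here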
[ 643.169169] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 643.169169] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 643.169169] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 643.169169] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 643.169169] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 643.169169] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 643.169169] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 643.169169] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.169169] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 643.169169] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.169169] env[61998]: ERROR nova.compute.manager raise self.value [ 643.169169] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 643.169169] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 643.169169] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.169169] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 643.169697] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.169697] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 643.169697] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port af39bae6-3885-49eb-ba2c-1a564b5a9f59, please check neutron logs for more information. 
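The excutils frames in this traceback (__exit__, force_reraise, raise self.value) are oslo_utils' save_and_reraise_exception at work: run rollback when an exception escapes the block, then re-raise the original exception so the caller still sees PortBindingFailed rather than some secondary error. A simplified sketch of the pattern (the real helper captures sys.exc_info and can be told not to re-raise):

from contextlib import contextmanager

@contextmanager
def save_and_reraise_exception():
    # Run cleanup when an exception escapes, then re-raise the original.
    try:
        yield
    except Exception:
        print("rolling back port updates")  # Nova's cleanup would go here
        raise                               # bare raise keeps the traceback

try:
    with save_and_reraise_exception():
        raise RuntimeError("stand-in for PortBindingFailed")
except RuntimeError as exc:
    print("caller still sees the original error:", exc)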
[ 643.169697] env[61998]: ERROR nova.compute.manager [ 643.169697] env[61998]: Traceback (most recent call last): [ 643.169697] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 643.169697] env[61998]: listener.cb(fileno) [ 643.169697] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 643.169697] env[61998]: result = function(*args, **kwargs) [ 643.169697] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 643.169697] env[61998]: return func(*args, **kwargs) [ 643.169697] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 643.169697] env[61998]: raise e [ 643.169697] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 643.169697] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 643.169697] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 643.169697] env[61998]: created_port_ids = self._update_ports_for_instance( [ 643.169697] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 643.169697] env[61998]: with excutils.save_and_reraise_exception(): [ 643.169697] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.169697] env[61998]: self.force_reraise() [ 643.169697] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.169697] env[61998]: raise self.value [ 643.169697] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 643.169697] env[61998]: updated_port = self._update_port( [ 643.169697] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.169697] env[61998]: _ensure_no_port_binding_failure(port) [ 643.169697] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.169697] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 643.170529] env[61998]: nova.exception.PortBindingFailed: Binding failed for port af39bae6-3885-49eb-ba2c-1a564b5a9f59, please check neutron logs for more information. [ 643.170529] env[61998]: Removing descriptor: 15 [ 643.198718] env[61998]: DEBUG nova.scheduler.client.report [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 643.248612] env[61998]: DEBUG nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 643.277362] env[61998]: DEBUG nova.virt.hardware [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:50:22Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='075d8c31-459d-4952-a963-a9fd701ccd17',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1956429492',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 643.277598] env[61998]: DEBUG nova.virt.hardware [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 643.277750] env[61998]: DEBUG nova.virt.hardware [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 643.277967] env[61998]: DEBUG nova.virt.hardware [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 643.278087] env[61998]: DEBUG nova.virt.hardware [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 643.278237] env[61998]: DEBUG nova.virt.hardware [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 643.278472] env[61998]: DEBUG nova.virt.hardware [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 643.278633] env[61998]: DEBUG nova.virt.hardware [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 643.278795] env[61998]: DEBUG 
nova.virt.hardware [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 643.278976] env[61998]: DEBUG nova.virt.hardware [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 643.281389] env[61998]: DEBUG nova.virt.hardware [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 643.283408] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b16507d-4602-45df-bfd0-90d489beea41 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.291565] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d422b1b5-2ed5-4202-ab06-bbbcba3f11f3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.308193] env[61998]: ERROR nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port af39bae6-3885-49eb-ba2c-1a564b5a9f59, please check neutron logs for more information. 
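The hardware.py records above walk the driver's CPU-topology selection: for this 1-vCPU flavor with no flavor or image limits, the only factorization is sockets=1, cores=1, threads=1, which is why exactly one possible topology is logged. A toy enumeration of that step (illustrative only, not nova.virt.hardware's actual algorithm):

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Yield every (sockets, cores, threads) split whose product is vcpus
    # and which stays inside the per-dimension limits from flavor/image.
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            yield (s, c, t)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -- matches the log
print(list(possible_topologies(4)))   # a 4-vCPU flavor admits several splits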
[ 643.308193] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Traceback (most recent call last): [ 643.308193] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 643.308193] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] yield resources [ 643.308193] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 643.308193] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] self.driver.spawn(context, instance, image_meta, [ 643.308193] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 643.308193] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 643.308193] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 643.308193] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] vm_ref = self.build_virtual_machine(instance, [ 643.308193] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 643.308519] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] vif_infos = vmwarevif.get_vif_info(self._session, [ 643.308519] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 643.308519] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] for vif in network_info: [ 643.308519] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 643.308519] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] return self._sync_wrapper(fn, *args, **kwargs) [ 643.308519] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 643.308519] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] self.wait() [ 643.308519] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 643.308519] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] self[:] = self._gt.wait() [ 643.308519] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 643.308519] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] return self._exit_event.wait() [ 643.308519] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 643.308519] env[61998]: ERROR 
nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] current.throw(*self._exc) [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] result = function(*args, **kwargs) [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] return func(*args, **kwargs) [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] raise e [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] nwinfo = self.network_api.allocate_for_instance( [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] created_port_ids = self._update_ports_for_instance( [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] with excutils.save_and_reraise_exception(): [ 643.308827] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.309148] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] self.force_reraise() [ 643.309148] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.309148] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] raise self.value [ 643.309148] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 643.309148] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] updated_port = self._update_port( [ 643.309148] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.309148] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] _ensure_no_port_binding_failure(port) [ 643.309148] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.309148] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] raise exception.PortBindingFailed(port_id=port['id']) [ 643.309148] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] nova.exception.PortBindingFailed: Binding failed for port af39bae6-3885-49eb-ba2c-1a564b5a9f59, please check neutron logs for more information. [ 643.309148] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c]
[ 643.309148] env[61998]: INFO nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Terminating instance [ 643.312251] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquiring lock "refresh_cache-8672c282-1a66-49b5-9c22-7136b567a52c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.473747] env[61998]: DEBUG nova.network.neutron [req-d9c5d4b9-15bc-48d4-8fca-c8b1721c409c req-0953a3c8-0fbe-487f-962d-d5a5e1bd1efd service nova] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.529647] env[61998]: DEBUG nova.network.neutron [req-d9c5d4b9-15bc-48d4-8fca-c8b1721c409c req-0953a3c8-0fbe-487f-962d-d5a5e1bd1efd service nova] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.703844] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.704407] env[61998]: DEBUG nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Start building networks asynchronously for instance.
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 643.706964] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 22.495s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.707175] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.707326] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61998) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 643.707830] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.428s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.709540] env[61998]: INFO nova.compute.claims [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 643.712014] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a36fef-e46b-41a9-9141-5679454fa8b7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.720908] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1475dfbc-7bb1-4aee-a685-35702a05499b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.736289] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54097e0-c146-4623-b1d2-d2f92f66d3c2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.743681] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c927f96-25b7-4aef-b740-3a5cf5d8503e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.774723] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181348MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61998) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 643.774882] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.032605] env[61998]: DEBUG oslo_concurrency.lockutils [req-d9c5d4b9-15bc-48d4-8fca-c8b1721c409c req-0953a3c8-0fbe-487f-962d-d5a5e1bd1efd service nova] Releasing lock "refresh_cache-8672c282-1a66-49b5-9c22-7136b567a52c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.033033] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquired lock "refresh_cache-8672c282-1a66-49b5-9c22-7136b567a52c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.033224] env[61998]: DEBUG nova.network.neutron [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 644.213539] env[61998]: DEBUG nova.compute.utils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 644.214963] env[61998]: DEBUG nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 644.215145] env[61998]: DEBUG nova.network.neutron [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 644.286745] env[61998]: DEBUG nova.policy [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4601d5806157453b9534229de3500922', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc2f058d2c1d4861ab597e5c4e2e3146', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 644.551786] env[61998]: DEBUG nova.network.neutron [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 644.624139] env[61998]: DEBUG nova.network.neutron [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.666797] env[61998]: DEBUG nova.network.neutron [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Successfully created port: 00fab4e9-12f4-49c9-a91b-9797f7c881c4 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 644.719871] env[61998]: DEBUG nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 644.986219] env[61998]: DEBUG nova.compute.manager [req-3733b83e-daa8-485d-bf96-73f68b5a21b2 req-b34b4ee6-679d-43e7-adb5-8e4253314354 service nova] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Received event network-vif-deleted-af39bae6-3885-49eb-ba2c-1a564b5a9f59 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 645.126326] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Releasing lock "refresh_cache-8672c282-1a66-49b5-9c22-7136b567a52c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.126814] env[61998]: DEBUG nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 645.127019] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 645.127327] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f64ae671-8dda-48f6-a87c-3ab38a1aedcb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.140555] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf17f36a-47d6-4c13-a4fa-13d8e6388f0f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.167949] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8672c282-1a66-49b5-9c22-7136b567a52c could not be found. [ 645.168214] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 645.168418] env[61998]: INFO nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 645.168669] env[61998]: DEBUG oslo.service.loopingcall [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 645.170809] env[61998]: DEBUG nova.compute.manager [-] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 645.170907] env[61998]: DEBUG nova.network.neutron [-] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 645.173169] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba66cf00-26c9-4f35-8239-91918c5251f6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.180453] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8549d15e-956d-4800-850e-081e6c240e56 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.211823] env[61998]: DEBUG nova.network.neutron [-] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.213514] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4f2818-ac95-495b-8c96-de20a735d4bd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.224715] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786429d5-6884-43e0-a53a-c81f298be5d0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.243358] env[61998]: DEBUG nova.compute.provider_tree [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.661634] env[61998]: ERROR nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 00fab4e9-12f4-49c9-a91b-9797f7c881c4, please check neutron logs for more information. [ 645.661634] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 645.661634] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 645.661634] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 645.661634] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 645.661634] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 645.661634] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 645.661634] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 645.661634] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.661634] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 645.661634] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.661634] env[61998]: ERROR nova.compute.manager raise self.value [ 645.661634] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 645.661634] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 645.661634] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.661634] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 645.662318] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.662318] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 645.662318] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 00fab4e9-12f4-49c9-a91b-9797f7c881c4, please check neutron 
logs for more information. [ 645.662318] env[61998]: ERROR nova.compute.manager [ 645.662318] env[61998]: Traceback (most recent call last): [ 645.662318] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 645.662318] env[61998]: listener.cb(fileno) [ 645.662318] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.662318] env[61998]: result = function(*args, **kwargs) [ 645.662318] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 645.662318] env[61998]: return func(*args, **kwargs) [ 645.662318] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 645.662318] env[61998]: raise e [ 645.662318] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 645.662318] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 645.662318] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 645.662318] env[61998]: created_port_ids = self._update_ports_for_instance( [ 645.662318] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 645.662318] env[61998]: with excutils.save_and_reraise_exception(): [ 645.662318] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.662318] env[61998]: self.force_reraise() [ 645.662318] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.662318] env[61998]: raise self.value [ 645.662318] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 645.662318] env[61998]: updated_port = self._update_port( [ 645.662318] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.662318] env[61998]: _ensure_no_port_binding_failure(port) [ 645.662318] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.662318] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 645.663160] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 00fab4e9-12f4-49c9-a91b-9797f7c881c4, please check neutron logs for more information. [ 645.663160] env[61998]: Removing descriptor: 15 [ 645.718248] env[61998]: DEBUG nova.network.neutron [-] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.748131] env[61998]: DEBUG nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 645.751023] env[61998]: DEBUG nova.scheduler.client.report [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 645.786018] env[61998]: DEBUG nova.virt.hardware [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 645.786018] env[61998]: DEBUG nova.virt.hardware [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 645.786018] env[61998]: DEBUG nova.virt.hardware [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 645.786217] env[61998]: DEBUG nova.virt.hardware [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 645.786217] env[61998]: DEBUG nova.virt.hardware [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 645.786217] env[61998]: DEBUG nova.virt.hardware [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 645.786872] env[61998]: 
DEBUG nova.virt.hardware [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 645.790090] env[61998]: DEBUG nova.virt.hardware [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 645.790090] env[61998]: DEBUG nova.virt.hardware [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 645.790090] env[61998]: DEBUG nova.virt.hardware [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 645.790090] env[61998]: DEBUG nova.virt.hardware [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 645.790090] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0ba477-f4c8-45a2-a48f-5d78a9aa187c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.799599] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fb4cf7-5541-4a54-a04e-a85bea60983d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.816090] env[61998]: ERROR nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 00fab4e9-12f4-49c9-a91b-9797f7c881c4, please check neutron logs for more information. 
[ 645.816090] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Traceback (most recent call last): [ 645.816090] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 645.816090] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] yield resources [ 645.816090] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 645.816090] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] self.driver.spawn(context, instance, image_meta, [ 645.816090] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 645.816090] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] self._vmops.spawn(context, instance, image_meta, injected_files, [ 645.816090] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 645.816090] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] vm_ref = self.build_virtual_machine(instance, [ 645.816090] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 645.817050] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] vif_infos = vmwarevif.get_vif_info(self._session, [ 645.817050] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 645.817050] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] for vif in network_info: [ 645.817050] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 645.817050] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] return self._sync_wrapper(fn, *args, **kwargs) [ 645.817050] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 645.817050] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] self.wait() [ 645.817050] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 645.817050] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] self[:] = self._gt.wait() [ 645.817050] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 645.817050] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] return self._exit_event.wait() [ 645.817050] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 645.817050] env[61998]: ERROR 
nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] current.throw(*self._exc) [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] result = function(*args, **kwargs) [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] return func(*args, **kwargs) [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] raise e [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] nwinfo = self.network_api.allocate_for_instance( [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] created_port_ids = self._update_ports_for_instance( [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] with excutils.save_and_reraise_exception(): [ 645.818320] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.819117] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] self.force_reraise() [ 645.819117] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.819117] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] raise self.value [ 645.819117] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 645.819117] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] updated_port = self._update_port( [ 645.819117] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.819117] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] _ensure_no_port_binding_failure(port) [ 645.819117] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
645.819117] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] raise exception.PortBindingFailed(port_id=port['id']) [ 645.819117] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] nova.exception.PortBindingFailed: Binding failed for port 00fab4e9-12f4-49c9-a91b-9797f7c881c4, please check neutron logs for more information. [ 645.819117] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] [ 645.819117] env[61998]: INFO nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Terminating instance [ 645.819444] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquiring lock "refresh_cache-1a91b0c5-d852-424a-b576-3d2c76860b06" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.819444] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquired lock "refresh_cache-1a91b0c5-d852-424a-b576-3d2c76860b06" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.819444] env[61998]: DEBUG nova.network.neutron [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 646.220602] env[61998]: INFO nova.compute.manager [-] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Took 1.05 seconds to deallocate network for instance. [ 646.223797] env[61998]: DEBUG nova.compute.claims [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 646.223974] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.256018] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.256569] env[61998]: DEBUG nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 646.259841] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.028s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.446378] env[61998]: DEBUG nova.network.neutron [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 646.494292] env[61998]: DEBUG nova.network.neutron [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.766357] env[61998]: DEBUG nova.compute.utils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 646.767778] env[61998]: DEBUG nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 646.767942] env[61998]: DEBUG nova.network.neutron [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 646.882834] env[61998]: DEBUG nova.policy [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4601d5806157453b9534229de3500922', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc2f058d2c1d4861ab597e5c4e2e3146', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 646.996729] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Releasing lock "refresh_cache-1a91b0c5-d852-424a-b576-3d2c76860b06" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.999022] env[61998]: DEBUG nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 646.999022] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 646.999022] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-372d986d-1913-42de-8740-71c93c35be30 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.006776] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acc7123-4b01-4ec2-b371-3ef1fe7674c0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.032655] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1a91b0c5-d852-424a-b576-3d2c76860b06 could not be found. 
[ 647.032881] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 647.033073] env[61998]: INFO nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Took 0.04 seconds to destroy the instance on the hypervisor. [ 647.033320] env[61998]: DEBUG oslo.service.loopingcall [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 647.035802] env[61998]: DEBUG nova.compute.manager [-] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 647.035898] env[61998]: DEBUG nova.network.neutron [-] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 647.065672] env[61998]: DEBUG nova.network.neutron [-] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.121697] env[61998]: DEBUG nova.compute.manager [req-f3e81595-2ad9-4a37-b616-431a9b9578e2 req-83577826-f062-4a0e-878f-1cf9448e2cc4 service nova] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Received event network-changed-00fab4e9-12f4-49c9-a91b-9797f7c881c4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 647.122719] env[61998]: DEBUG nova.compute.manager [req-f3e81595-2ad9-4a37-b616-431a9b9578e2 req-83577826-f062-4a0e-878f-1cf9448e2cc4 service nova] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Refreshing instance network info cache due to event network-changed-00fab4e9-12f4-49c9-a91b-9797f7c881c4. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 647.122719] env[61998]: DEBUG oslo_concurrency.lockutils [req-f3e81595-2ad9-4a37-b616-431a9b9578e2 req-83577826-f062-4a0e-878f-1cf9448e2cc4 service nova] Acquiring lock "refresh_cache-1a91b0c5-d852-424a-b576-3d2c76860b06" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.122719] env[61998]: DEBUG oslo_concurrency.lockutils [req-f3e81595-2ad9-4a37-b616-431a9b9578e2 req-83577826-f062-4a0e-878f-1cf9448e2cc4 service nova] Acquired lock "refresh_cache-1a91b0c5-d852-424a-b576-3d2c76860b06" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.122719] env[61998]: DEBUG nova.network.neutron [req-f3e81595-2ad9-4a37-b616-431a9b9578e2 req-83577826-f062-4a0e-878f-1cf9448e2cc4 service nova] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Refreshing network info cache for port 00fab4e9-12f4-49c9-a91b-9797f7c881c4 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 647.250440] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b94957-df5e-4494-9f30-df79d23f49fb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.258525] env[61998]: DEBUG nova.network.neutron [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Successfully created port: d0d9f3dd-2904-460e-8c8d-0fead5382ec2 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 647.263798] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c201889-c266-4b35-9772-c3c6bffee456 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.297378] env[61998]: DEBUG nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 647.300849] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acc75ca-0351-40ec-913a-16616a51cd06 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.309118] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98e32f3-ab0a-4903-a50c-ea8fcc494604 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.323029] env[61998]: DEBUG nova.compute.provider_tree [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.569695] env[61998]: DEBUG nova.network.neutron [-] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.644317] env[61998]: DEBUG nova.network.neutron [req-f3e81595-2ad9-4a37-b616-431a9b9578e2 req-83577826-f062-4a0e-878f-1cf9448e2cc4 service nova] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.703641] env[61998]: DEBUG nova.network.neutron [req-f3e81595-2ad9-4a37-b616-431a9b9578e2 req-83577826-f062-4a0e-878f-1cf9448e2cc4 service nova] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.827078] env[61998]: DEBUG nova.scheduler.client.report [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 647.990700] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "f5c91aad-0f8a-4ad3-8566-7f36ff983575" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.991038] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "f5c91aad-0f8a-4ad3-8566-7f36ff983575" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.071825] env[61998]: INFO nova.compute.manager [-] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Took 1.04 seconds to deallocate network for instance. [ 648.074060] env[61998]: DEBUG nova.compute.claims [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 648.074238] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.206525] env[61998]: DEBUG oslo_concurrency.lockutils [req-f3e81595-2ad9-4a37-b616-431a9b9578e2 req-83577826-f062-4a0e-878f-1cf9448e2cc4 service nova] Releasing lock "refresh_cache-1a91b0c5-d852-424a-b576-3d2c76860b06" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.208941] env[61998]: DEBUG nova.compute.manager [req-f3e81595-2ad9-4a37-b616-431a9b9578e2 req-83577826-f062-4a0e-878f-1cf9448e2cc4 service nova] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Received event network-vif-deleted-00fab4e9-12f4-49c9-a91b-9797f7c881c4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 648.269473] env[61998]: ERROR nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d0d9f3dd-2904-460e-8c8d-0fead5382ec2, please check neutron logs for more information. 
[ 648.269473] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 648.269473] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 648.269473] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 648.269473] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 648.269473] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 648.269473] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 648.269473] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 648.269473] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.269473] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 648.269473] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.269473] env[61998]: ERROR nova.compute.manager raise self.value [ 648.269473] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 648.269473] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 648.269473] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.269473] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 648.269926] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 648.269926] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 648.269926] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d0d9f3dd-2904-460e-8c8d-0fead5382ec2, please check neutron logs for more information. 
[ 648.269926] env[61998]: ERROR nova.compute.manager [ 648.269926] env[61998]: Traceback (most recent call last): [ 648.269926] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 648.269926] env[61998]: listener.cb(fileno) [ 648.269926] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 648.269926] env[61998]: result = function(*args, **kwargs) [ 648.269926] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 648.269926] env[61998]: return func(*args, **kwargs) [ 648.269926] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 648.269926] env[61998]: raise e [ 648.269926] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 648.269926] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 648.269926] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 648.269926] env[61998]: created_port_ids = self._update_ports_for_instance( [ 648.269926] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 648.269926] env[61998]: with excutils.save_and_reraise_exception(): [ 648.269926] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.269926] env[61998]: self.force_reraise() [ 648.269926] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.269926] env[61998]: raise self.value [ 648.269926] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 648.269926] env[61998]: updated_port = self._update_port( [ 648.269926] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.269926] env[61998]: _ensure_no_port_binding_failure(port) [ 648.269926] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 648.269926] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 648.270650] env[61998]: nova.exception.PortBindingFailed: Binding failed for port d0d9f3dd-2904-460e-8c8d-0fead5382ec2, please check neutron logs for more information. [ 648.270650] env[61998]: Removing descriptor: 15 [ 648.310108] env[61998]: DEBUG nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 648.338264] env[61998]: DEBUG nova.virt.hardware [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 648.338585] env[61998]: DEBUG nova.virt.hardware [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 648.338778] env[61998]: DEBUG nova.virt.hardware [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 648.338988] env[61998]: DEBUG nova.virt.hardware [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 648.339185] env[61998]: DEBUG nova.virt.hardware [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 648.339360] env[61998]: DEBUG nova.virt.hardware [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 648.340553] env[61998]: DEBUG nova.virt.hardware [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 648.340553] env[61998]: DEBUG nova.virt.hardware [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 648.340553] env[61998]: DEBUG 
nova.virt.hardware [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 648.340553] env[61998]: DEBUG nova.virt.hardware [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 648.340553] env[61998]: DEBUG nova.virt.hardware [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 648.341252] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.081s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.341847] env[61998]: ERROR nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 60bdf1b2-ec40-492d-9a63-9aadcf1aaed4, please check neutron logs for more information. 
[ 648.341847] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Traceback (most recent call last): [ 648.341847] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 648.341847] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] self.driver.spawn(context, instance, image_meta, [ 648.341847] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 648.341847] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 648.341847] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 648.341847] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] vm_ref = self.build_virtual_machine(instance, [ 648.341847] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 648.341847] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] vif_infos = vmwarevif.get_vif_info(self._session, [ 648.341847] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] for vif in network_info: [ 648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] return self._sync_wrapper(fn, *args, **kwargs) [ 648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] self.wait() [ 648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] self[:] = self._gt.wait() [ 648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] return self._exit_event.wait() [ 648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] current.throw(*self._exc) [ 648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
648.342196] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] result = function(*args, **kwargs) [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] return func(*args, **kwargs) [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] raise e [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] nwinfo = self.network_api.allocate_for_instance( [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] created_port_ids = self._update_ports_for_instance( [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] with excutils.save_and_reraise_exception(): [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] self.force_reraise() [ 648.342553] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.342914] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] raise self.value [ 648.342914] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 648.342914] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] updated_port = self._update_port( [ 648.342914] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.342914] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] _ensure_no_port_binding_failure(port) [ 648.342914] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 648.342914] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] raise exception.PortBindingFailed(port_id=port['id']) [ 648.342914] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] nova.exception.PortBindingFailed: Binding failed for 
port 60bdf1b2-ec40-492d-9a63-9aadcf1aaed4, please check neutron logs for more information. [ 648.342914] env[61998]: ERROR nova.compute.manager [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] [ 648.342914] env[61998]: DEBUG nova.compute.utils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Binding failed for port 60bdf1b2-ec40-492d-9a63-9aadcf1aaed4, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 648.344571] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a73c56c-32f6-4ba6-a539-08982d504f61 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.347622] env[61998]: DEBUG nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Build of instance b4706725-5e28-4d2a-b4a8-7633ffa63afe was re-scheduled: Binding failed for port 60bdf1b2-ec40-492d-9a63-9aadcf1aaed4, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 648.348094] env[61998]: DEBUG nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 648.348357] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Acquiring lock "refresh_cache-b4706725-5e28-4d2a-b4a8-7633ffa63afe" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.348577] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Acquired lock "refresh_cache-b4706725-5e28-4d2a-b4a8-7633ffa63afe" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.348772] env[61998]: DEBUG nova.network.neutron [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 648.349758] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.574s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.352991] env[61998]: INFO nova.compute.claims [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 
tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 648.361168] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baaffdea-b376-424c-839c-a32bc3b634e8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.375797] env[61998]: ERROR nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d0d9f3dd-2904-460e-8c8d-0fead5382ec2, please check neutron logs for more information. [ 648.375797] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Traceback (most recent call last): [ 648.375797] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 648.375797] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] yield resources [ 648.375797] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 648.375797] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] self.driver.spawn(context, instance, image_meta, [ 648.375797] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 648.375797] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 648.375797] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 648.375797] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] vm_ref = self.build_virtual_machine(instance, [ 648.375797] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] vif_infos = vmwarevif.get_vif_info(self._session, [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] for vif in network_info: [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] return self._sync_wrapper(fn, *args, **kwargs) [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 
0d680f38-bd47-4aeb-8845-efa20667623b] self.wait() [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] self[:] = self._gt.wait() [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] return self._exit_event.wait() [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 648.376204] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] current.throw(*self._exc) [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] result = function(*args, **kwargs) [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] return func(*args, **kwargs) [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] raise e [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] nwinfo = self.network_api.allocate_for_instance( [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] created_port_ids = self._update_ports_for_instance( [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] with excutils.save_and_reraise_exception(): [ 648.376688] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.377090] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] self.force_reraise() [ 648.377090] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.377090] env[61998]: ERROR nova.compute.manager 
[instance: 0d680f38-bd47-4aeb-8845-efa20667623b] raise self.value [ 648.377090] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 648.377090] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] updated_port = self._update_port( [ 648.377090] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.377090] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] _ensure_no_port_binding_failure(port) [ 648.377090] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 648.377090] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] raise exception.PortBindingFailed(port_id=port['id']) [ 648.377090] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] nova.exception.PortBindingFailed: Binding failed for port d0d9f3dd-2904-460e-8c8d-0fead5382ec2, please check neutron logs for more information. [ 648.377090] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] [ 648.377090] env[61998]: INFO nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Terminating instance [ 648.378647] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquiring lock "refresh_cache-0d680f38-bd47-4aeb-8845-efa20667623b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.378846] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquired lock "refresh_cache-0d680f38-bd47-4aeb-8845-efa20667623b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.379046] env[61998]: DEBUG nova.network.neutron [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 648.873015] env[61998]: DEBUG nova.network.neutron [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.897078] env[61998]: DEBUG nova.network.neutron [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.946089] env[61998]: DEBUG nova.network.neutron [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.958514] env[61998]: DEBUG nova.network.neutron [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.145073] env[61998]: DEBUG nova.compute.manager [req-25230a37-7633-48f0-b0db-68b16cf4ecbf req-f2401663-45aa-485e-8987-c45a88f0de7d service nova] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Received event network-changed-d0d9f3dd-2904-460e-8c8d-0fead5382ec2 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 649.145330] env[61998]: DEBUG nova.compute.manager [req-25230a37-7633-48f0-b0db-68b16cf4ecbf req-f2401663-45aa-485e-8987-c45a88f0de7d service nova] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Refreshing instance network info cache due to event network-changed-d0d9f3dd-2904-460e-8c8d-0fead5382ec2. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 649.145515] env[61998]: DEBUG oslo_concurrency.lockutils [req-25230a37-7633-48f0-b0db-68b16cf4ecbf req-f2401663-45aa-485e-8987-c45a88f0de7d service nova] Acquiring lock "refresh_cache-0d680f38-bd47-4aeb-8845-efa20667623b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.448860] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Releasing lock "refresh_cache-0d680f38-bd47-4aeb-8845-efa20667623b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.449330] env[61998]: DEBUG nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 649.449483] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 649.449819] env[61998]: DEBUG oslo_concurrency.lockutils [req-25230a37-7633-48f0-b0db-68b16cf4ecbf req-f2401663-45aa-485e-8987-c45a88f0de7d service nova] Acquired lock "refresh_cache-0d680f38-bd47-4aeb-8845-efa20667623b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.449973] env[61998]: DEBUG nova.network.neutron [req-25230a37-7633-48f0-b0db-68b16cf4ecbf req-f2401663-45aa-485e-8987-c45a88f0de7d service nova] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Refreshing network info cache for port d0d9f3dd-2904-460e-8c8d-0fead5382ec2 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 649.451507] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80105af0-6e2f-4c14-9d7b-4d2fb9167a46 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.461658] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Releasing lock "refresh_cache-b4706725-5e28-4d2a-b4a8-7633ffa63afe" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.461877] env[61998]: DEBUG nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 649.462072] env[61998]: DEBUG nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 649.462234] env[61998]: DEBUG nova.network.neutron [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 649.466610] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a60f4c4-73e5-4493-a778-15895cda52d5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.492479] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0d680f38-bd47-4aeb-8845-efa20667623b could not be found.
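Every "Binding failed for port ..." failure in this log bottoms out in the same frame: nova/network/neutron.py line 294, _ensure_no_port_binding_failure. A minimal sketch of that check, reconstructed from the traceback frames above rather than copied from the Nova source (the 'binding_failed' marker value is an assumption):

    from nova import exception

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed marker value

    def _ensure_no_port_binding_failure(port):
        # Neutron flags a port whose binding could not be completed through
        # its binding:vif_type attribute; Nova converts that flag into the
        # PortBindingFailed exception seen throughout this log. The exception
        # carries only the port ID, which is why the message defers to the
        # neutron logs for the underlying cause.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise exception.PortBindingFailed(port_id=port['id'])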
[ 649.492572] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 649.492750] env[61998]: INFO nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 649.493042] env[61998]: DEBUG oslo.service.loopingcall [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 649.495812] env[61998]: DEBUG nova.compute.manager [-] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 649.495914] env[61998]: DEBUG nova.network.neutron [-] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 649.498358] env[61998]: DEBUG nova.network.neutron [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.517137] env[61998]: DEBUG nova.network.neutron [-] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Instance cache missing network info.
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.788357] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b93ca5-eaae-4feb-8bdb-c44a157efb80 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.797761] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad76f72d-842e-4a41-bce6-48ad44aa24ac {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.826571] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992982b5-93f2-4698-8a4e-18d727741a56 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.833432] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eadf137-1231-4410-a9a3-c1f1dc4a06a7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.846132] env[61998]: DEBUG nova.compute.provider_tree [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.968789] env[61998]: DEBUG nova.network.neutron [req-25230a37-7633-48f0-b0db-68b16cf4ecbf req-f2401663-45aa-485e-8987-c45a88f0de7d service nova] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.001863] env[61998]: DEBUG nova.network.neutron [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.012204] env[61998]: DEBUG nova.network.neutron [req-25230a37-7633-48f0-b0db-68b16cf4ecbf req-f2401663-45aa-485e-8987-c45a88f0de7d service nova] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.019696] env[61998]: DEBUG nova.network.neutron [-] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.350033] env[61998]: DEBUG nova.scheduler.client.report [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 650.504810] env[61998]: INFO nova.compute.manager [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] [instance: b4706725-5e28-4d2a-b4a8-7633ffa63afe] Took 1.04 seconds to deallocate network for instance. [ 650.514952] env[61998]: DEBUG oslo_concurrency.lockutils [req-25230a37-7633-48f0-b0db-68b16cf4ecbf req-f2401663-45aa-485e-8987-c45a88f0de7d service nova] Releasing lock "refresh_cache-0d680f38-bd47-4aeb-8845-efa20667623b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.515240] env[61998]: DEBUG nova.compute.manager [req-25230a37-7633-48f0-b0db-68b16cf4ecbf req-f2401663-45aa-485e-8987-c45a88f0de7d service nova] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Received event network-vif-deleted-d0d9f3dd-2904-460e-8c8d-0fead5382ec2 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 650.521846] env[61998]: INFO nova.compute.manager [-] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Took 1.03 seconds to deallocate network for instance. 
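For the inventory record at 650.350033: Placement derives schedulable capacity per resource class as (total - reserved) * allocation_ratio, so the figures above work out as below (illustrative arithmetic only, not Nova code):

    # Resource-class capacities implied by the logged inventory data.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

The 4.0 VCPU allocation ratio turns 48 host cores into 192 schedulable vCPUs, while memory and disk are not overcommitted, which is why the repeated resource claims in this log keep succeeding despite the build failures.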
[ 650.524526] env[61998]: DEBUG nova.compute.claims [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 650.524526] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.854831] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.505s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.855465] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 650.858408] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.123s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.859749] env[61998]: INFO nova.compute.claims [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 651.364870] env[61998]: DEBUG nova.compute.utils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 651.368032] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 651.368032] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 651.414394] env[61998]: DEBUG nova.policy [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ffba8b1830244d091b15706a3f986d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad8f2951990c4b3b9059adcd4f0e65dd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 651.533810] env[61998]: INFO nova.scheduler.client.report [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Deleted allocations for instance b4706725-5e28-4d2a-b4a8-7633ffa63afe [ 651.729890] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Successfully created port: f224758a-f6d5-4302-9abb-9f5d2ae94568 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.869297] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 652.043688] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6a2c9675-90f7-4393-9c5f-191b444f8510 tempest-ServerActionsTestOtherB-2084499513 tempest-ServerActionsTestOtherB-2084499513-project-member] Lock "b4706725-5e28-4d2a-b4a8-7633ffa63afe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 97.803s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.217027] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9d88ab-149d-4eec-a323-f72e63c70bfc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.225324] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b658127d-157a-4c4a-8a48-d5cf0e32a80c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.256948] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0c5091-e926-4b52-a208-25f26fea18f2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.264601] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a0c8b7-9b0b-43dc-8c20-fb0dd943c9fd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.279781] env[61998]: DEBUG nova.compute.provider_tree [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.512026] env[61998]: DEBUG nova.compute.manager [req-9c2fc0d0-9632-483d-bcb1-022809e368bd req-81a7dbfc-47f7-428b-8741-69c699d835b3 service nova] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Received event network-changed-f224758a-f6d5-4302-9abb-9f5d2ae94568 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 652.512260] env[61998]: DEBUG nova.compute.manager [req-9c2fc0d0-9632-483d-bcb1-022809e368bd req-81a7dbfc-47f7-428b-8741-69c699d835b3 service nova] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Refreshing instance network info cache due to event network-changed-f224758a-f6d5-4302-9abb-9f5d2ae94568.
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 652.512401] env[61998]: DEBUG oslo_concurrency.lockutils [req-9c2fc0d0-9632-483d-bcb1-022809e368bd req-81a7dbfc-47f7-428b-8741-69c699d835b3 service nova] Acquiring lock "refresh_cache-e730a03f-64c0-4e94-bc66-d3006be8b3ca" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.512554] env[61998]: DEBUG oslo_concurrency.lockutils [req-9c2fc0d0-9632-483d-bcb1-022809e368bd req-81a7dbfc-47f7-428b-8741-69c699d835b3 service nova] Acquired lock "refresh_cache-e730a03f-64c0-4e94-bc66-d3006be8b3ca" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.512736] env[61998]: DEBUG nova.network.neutron [req-9c2fc0d0-9632-483d-bcb1-022809e368bd req-81a7dbfc-47f7-428b-8741-69c699d835b3 service nova] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Refreshing network info cache for port f224758a-f6d5-4302-9abb-9f5d2ae94568 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 652.546217] env[61998]: DEBUG nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 652.714126] env[61998]: ERROR nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f224758a-f6d5-4302-9abb-9f5d2ae94568, please check neutron logs for more information. 
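The raw traceback that follows shows oslo.utils' save_and_reraise_exception context manager at work: on exit, __exit__() calls force_reraise(), which re-raises the captured exception via raise self.value, exactly the frames repeated below. A self-contained usage sketch (illustrative; bind() is a hypothetical stand-in for Nova's _update_port()):

    from oslo_utils import excutils

    def bind(port_id):
        # Hypothetical helper that always fails, to exercise the re-raise path.
        raise RuntimeError('binding failed for port %s' % port_id)

    def update_ports(port_ids):
        with excutils.save_and_reraise_exception():
            # Any exception raised in this block is captured on exit and
            # re-raised unchanged, so cleanup or logging done here cannot
            # accidentally swallow the original error.
            for port_id in port_ids:
                bind(port_id)

    try:
        update_ports(['f224758a'])
    except RuntimeError as exc:
        print(exc)  # the original exception survives intact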
[ 652.714126] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 652.714126] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 652.714126] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 652.714126] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.714126] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 652.714126] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.714126] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 652.714126] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.714126] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 652.714126] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.714126] env[61998]: ERROR nova.compute.manager raise self.value [ 652.714126] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.714126] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 652.714126] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.714126] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 652.714550] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.714550] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 652.714550] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f224758a-f6d5-4302-9abb-9f5d2ae94568, please check neutron logs for more information. 
[ 652.714550] env[61998]: ERROR nova.compute.manager [ 652.714550] env[61998]: Traceback (most recent call last): [ 652.714550] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 652.714550] env[61998]: listener.cb(fileno) [ 652.714550] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.714550] env[61998]: result = function(*args, **kwargs) [ 652.714550] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 652.714550] env[61998]: return func(*args, **kwargs) [ 652.714550] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 652.714550] env[61998]: raise e [ 652.714550] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 652.714550] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 652.714550] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.714550] env[61998]: created_port_ids = self._update_ports_for_instance( [ 652.714550] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.714550] env[61998]: with excutils.save_and_reraise_exception(): [ 652.714550] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.714550] env[61998]: self.force_reraise() [ 652.714550] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.714550] env[61998]: raise self.value [ 652.714550] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.714550] env[61998]: updated_port = self._update_port( [ 652.714550] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.714550] env[61998]: _ensure_no_port_binding_failure(port) [ 652.714550] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.714550] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 652.715261] env[61998]: nova.exception.PortBindingFailed: Binding failed for port f224758a-f6d5-4302-9abb-9f5d2ae94568, please check neutron logs for more information. [ 652.715261] env[61998]: Removing descriptor: 17 [ 652.783536] env[61998]: DEBUG nova.scheduler.client.report [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 652.881853] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 652.913371] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 652.913614] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 652.913767] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 652.913943] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 652.914095] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 652.914240] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 652.914441] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 652.914598] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:471}} [ 652.914763] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 652.914924] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 652.915112] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 652.915982] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c048e5-71cb-403e-9d9a-30c57ce2b091 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.927159] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b091659-4c6e-41f6-bdc6-6a6ca5e6f430 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.940406] env[61998]: ERROR nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f224758a-f6d5-4302-9abb-9f5d2ae94568, please check neutron logs for more information. 
[ 652.940406] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Traceback (most recent call last): [ 652.940406] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 652.940406] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] yield resources [ 652.940406] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 652.940406] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] self.driver.spawn(context, instance, image_meta, [ 652.940406] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 652.940406] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 652.940406] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 652.940406] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] vm_ref = self.build_virtual_machine(instance, [ 652.940406] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 652.940873] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] vif_infos = vmwarevif.get_vif_info(self._session, [ 652.940873] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 652.940873] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] for vif in network_info: [ 652.940873] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 652.940873] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] return self._sync_wrapper(fn, *args, **kwargs) [ 652.940873] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 652.940873] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] self.wait() [ 652.940873] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 652.940873] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] self[:] = self._gt.wait() [ 652.940873] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 652.940873] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] return self._exit_event.wait() [ 652.940873] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 652.940873] env[61998]: ERROR 
nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] current.throw(*self._exc) [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] result = function(*args, **kwargs) [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] return func(*args, **kwargs) [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] raise e [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] nwinfo = self.network_api.allocate_for_instance( [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] created_port_ids = self._update_ports_for_instance( [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] with excutils.save_and_reraise_exception(): [ 652.941261] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.941635] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] self.force_reraise() [ 652.941635] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.941635] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] raise self.value [ 652.941635] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.941635] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] updated_port = self._update_port( [ 652.941635] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.941635] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] _ensure_no_port_binding_failure(port) [ 652.941635] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
652.941635] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] raise exception.PortBindingFailed(port_id=port['id']) [ 652.941635] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] nova.exception.PortBindingFailed: Binding failed for port f224758a-f6d5-4302-9abb-9f5d2ae94568, please check neutron logs for more information. [ 652.941635] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] [ 652.941635] env[61998]: INFO nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Terminating instance [ 652.944132] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "refresh_cache-e730a03f-64c0-4e94-bc66-d3006be8b3ca" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.034355] env[61998]: DEBUG nova.network.neutron [req-9c2fc0d0-9632-483d-bcb1-022809e368bd req-81a7dbfc-47f7-428b-8741-69c699d835b3 service nova] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 653.072088] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.113462] env[61998]: DEBUG nova.network.neutron [req-9c2fc0d0-9632-483d-bcb1-022809e368bd req-81a7dbfc-47f7-428b-8741-69c699d835b3 service nova] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.289424] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.290847] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 653.292196] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.693s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.616545] env[61998]: DEBUG oslo_concurrency.lockutils [req-9c2fc0d0-9632-483d-bcb1-022809e368bd req-81a7dbfc-47f7-428b-8741-69c699d835b3 service nova] Releasing lock "refresh_cache-e730a03f-64c0-4e94-bc66-d3006be8b3ca" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.616987] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquired lock "refresh_cache-e730a03f-64c0-4e94-bc66-d3006be8b3ca" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.617188] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 653.797515] env[61998]: DEBUG nova.compute.utils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 653.802775] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 653.802982] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 653.886593] env[61998]: DEBUG nova.policy [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ffba8b1830244d091b15706a3f986d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad8f2951990c4b3b9059adcd4f0e65dd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 654.138702] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.255856] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Successfully created port: d0951916-cda6-44b6-b427-5e6db7f3910d {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 654.260885] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73db46e1-da4d-43a5-8096-68868e97cb74 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.269263] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983bbb6c-bca2-4e26-9b21-a42983cb0a3f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.303763] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 654.307959] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.309083] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b86d029-7781-43d3-a369-0caff591815f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.316909] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd7b206-3408-4322-8a9c-a9cd02816435 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.331894] env[61998]: DEBUG nova.compute.provider_tree [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.537708] env[61998]: DEBUG nova.compute.manager [req-8a1d6a62-b2aa-4b7b-b80b-71f19faf8de5 req-482f874c-caef-4ab1-b25a-16a9f2428c64 service nova] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Received event network-vif-deleted-f224758a-f6d5-4302-9abb-9f5d2ae94568 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 654.816152] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Releasing lock "refresh_cache-e730a03f-64c0-4e94-bc66-d3006be8b3ca" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.816659] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 654.816917] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 654.817508] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f8225ccc-e08c-4503-93d6-0be10b3f8714 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.827612] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26607ef4-8f85-4947-a68e-cbc77334393c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.842617] env[61998]: DEBUG nova.scheduler.client.report [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 654.863287] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e730a03f-64c0-4e94-bc66-d3006be8b3ca could not be found. [ 654.863792] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 654.863918] env[61998]: INFO nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Took 0.05 seconds to destroy the instance on the hypervisor. [ 654.864165] env[61998]: DEBUG oslo.service.loopingcall [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 654.865136] env[61998]: DEBUG nova.compute.manager [-] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 654.865229] env[61998]: DEBUG nova.network.neutron [-] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 654.890644] env[61998]: DEBUG nova.network.neutron [-] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.319181] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 655.344013] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 655.344881] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 655.344881] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.344881] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 655.344881] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.344881] env[61998]: DEBUG 
nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 655.345127] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 655.345289] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 655.345750] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 655.345977] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 655.346160] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 655.347433] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e3c146-c379-45ba-9189-eeb086d7e648 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.351157] env[61998]: ERROR nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d0951916-cda6-44b6-b427-5e6db7f3910d, please check neutron logs for more information. 
[ 655.351157] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 655.351157] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 655.351157] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 655.351157] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 655.351157] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 655.351157] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 655.351157] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 655.351157] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.351157] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 655.351157] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.351157] env[61998]: ERROR nova.compute.manager raise self.value [ 655.351157] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 655.351157] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 655.351157] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.351157] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 655.351966] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 655.351966] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 655.351966] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d0951916-cda6-44b6-b427-5e6db7f3910d, please check neutron logs for more information. 
[ 655.351966] env[61998]: ERROR nova.compute.manager [ 655.351966] env[61998]: Traceback (most recent call last): [ 655.351966] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 655.351966] env[61998]: listener.cb(fileno) [ 655.351966] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 655.351966] env[61998]: result = function(*args, **kwargs) [ 655.351966] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 655.351966] env[61998]: return func(*args, **kwargs) [ 655.351966] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 655.351966] env[61998]: raise e [ 655.351966] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 655.351966] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 655.351966] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 655.351966] env[61998]: created_port_ids = self._update_ports_for_instance( [ 655.351966] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 655.351966] env[61998]: with excutils.save_and_reraise_exception(): [ 655.351966] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.351966] env[61998]: self.force_reraise() [ 655.351966] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.351966] env[61998]: raise self.value [ 655.351966] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 655.351966] env[61998]: updated_port = self._update_port( [ 655.351966] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.351966] env[61998]: _ensure_no_port_binding_failure(port) [ 655.351966] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 655.351966] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 655.353096] env[61998]: nova.exception.PortBindingFailed: Binding failed for port d0951916-cda6-44b6-b427-5e6db7f3910d, please check neutron logs for more information. [ 655.353096] env[61998]: Removing descriptor: 17 [ 655.353096] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.060s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.353096] env[61998]: ERROR nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5a029700-355b-4874-a77b-2768950fce1a, please check neutron logs for more information. 
[ 655.353096] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] Traceback (most recent call last): [ 655.353096] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 655.353096] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] self.driver.spawn(context, instance, image_meta, [ 655.353096] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 655.353096] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] self._vmops.spawn(context, instance, image_meta, injected_files, [ 655.353096] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] vm_ref = self.build_virtual_machine(instance, [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] vif_infos = vmwarevif.get_vif_info(self._session, [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] for vif in network_info: [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] return self._sync_wrapper(fn, *args, **kwargs) [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] self.wait() [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] self[:] = self._gt.wait() [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 655.353514] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] return self._exit_event.wait() [ 655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] result = hub.switch() [ 655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] return self.greenlet.switch() [ 655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] result = function(*args, **kwargs) [ 655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] return func(*args, **kwargs) [ 655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] raise e [ 655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] nwinfo = self.network_api.allocate_for_instance( [ 655.354026] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] created_port_ids = self._update_ports_for_instance( [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] with excutils.save_and_reraise_exception(): [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] self.force_reraise() [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] raise self.value [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] updated_port = self._update_port( [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] _ensure_no_port_binding_failure(port) [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 655.354711] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] raise exception.PortBindingFailed(port_id=port['id']) [ 655.355239] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] nova.exception.PortBindingFailed: Binding failed for port 5a029700-355b-4874-a77b-2768950fce1a, please check neutron logs for more information. [ 655.355239] env[61998]: ERROR nova.compute.manager [instance: 70af5d32-254f-4819-8cca-c28346e48139] [ 655.355239] env[61998]: DEBUG nova.compute.utils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Binding failed for port 5a029700-355b-4874-a77b-2768950fce1a, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 655.355239] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.972s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.356430] env[61998]: INFO nova.compute.claims [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.363109] env[61998]: DEBUG nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Build of instance 70af5d32-254f-4819-8cca-c28346e48139 was re-scheduled: Binding failed for port 5a029700-355b-4874-a77b-2768950fce1a, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 655.363768] env[61998]: DEBUG nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 655.364103] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquiring lock "refresh_cache-70af5d32-254f-4819-8cca-c28346e48139" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.364313] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Acquired lock "refresh_cache-70af5d32-254f-4819-8cca-c28346e48139" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.364533] env[61998]: DEBUG nova.network.neutron [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 655.367609] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307d27bb-5db0-484a-81fa-33a0b948cf54 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.385641] env[61998]: ERROR nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d0951916-cda6-44b6-b427-5e6db7f3910d, please check neutron logs for more information. 
[ 655.385641] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Traceback (most recent call last): [ 655.385641] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 655.385641] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] yield resources [ 655.385641] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 655.385641] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] self.driver.spawn(context, instance, image_meta, [ 655.385641] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 655.385641] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 655.385641] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 655.385641] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] vm_ref = self.build_virtual_machine(instance, [ 655.385641] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 655.386058] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] vif_infos = vmwarevif.get_vif_info(self._session, [ 655.386058] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 655.386058] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] for vif in network_info: [ 655.386058] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 655.386058] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] return self._sync_wrapper(fn, *args, **kwargs) [ 655.386058] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 655.386058] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] self.wait() [ 655.386058] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 655.386058] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] self[:] = self._gt.wait() [ 655.386058] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 655.386058] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] return self._exit_event.wait() [ 655.386058] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 655.386058] env[61998]: ERROR 
nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] current.throw(*self._exc) [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] result = function(*args, **kwargs) [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] return func(*args, **kwargs) [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] raise e [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] nwinfo = self.network_api.allocate_for_instance( [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] created_port_ids = self._update_ports_for_instance( [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] with excutils.save_and_reraise_exception(): [ 655.387554] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.387971] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] self.force_reraise() [ 655.387971] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.387971] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] raise self.value [ 655.387971] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 655.387971] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] updated_port = self._update_port( [ 655.387971] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.387971] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] _ensure_no_port_binding_failure(port) [ 655.387971] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
655.387971] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] raise exception.PortBindingFailed(port_id=port['id']) [ 655.387971] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] nova.exception.PortBindingFailed: Binding failed for port d0951916-cda6-44b6-b427-5e6db7f3910d, please check neutron logs for more information. [ 655.387971] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] [ 655.387971] env[61998]: INFO nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Terminating instance [ 655.388738] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "refresh_cache-dff2b45c-bf45-4b22-b78f-287019b483f2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.388738] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquired lock "refresh_cache-dff2b45c-bf45-4b22-b78f-287019b483f2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.388738] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 655.393158] env[61998]: DEBUG nova.network.neutron [-] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.897593] env[61998]: INFO nova.compute.manager [-] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Took 1.03 seconds to deallocate network for instance. [ 655.899356] env[61998]: DEBUG nova.compute.claims [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 655.899356] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.901447] env[61998]: DEBUG nova.network.neutron [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.911104] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.999942] env[61998]: DEBUG nova.network.neutron [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.075910] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.503538] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Releasing lock "refresh_cache-70af5d32-254f-4819-8cca-c28346e48139" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.504246] env[61998]: DEBUG nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 656.504246] env[61998]: DEBUG nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 656.504246] env[61998]: DEBUG nova.network.neutron [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 656.525697] env[61998]: DEBUG nova.network.neutron [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.579067] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Releasing lock "refresh_cache-dff2b45c-bf45-4b22-b78f-287019b483f2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.579659] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 656.579736] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 656.580511] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf8cf232-f96d-4835-b46d-9ae82d8a3c28 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.589361] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b48e3e3-bafc-4f82-b922-e09f117d608d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.603540] env[61998]: DEBUG nova.compute.manager [req-183f41f3-ad7a-4a47-abac-4fc4a562db06 req-39637e05-69a5-442a-8059-1bfa3ae3abc4 service nova] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Received event network-changed-d0951916-cda6-44b6-b427-5e6db7f3910d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 656.603786] env[61998]: DEBUG nova.compute.manager [req-183f41f3-ad7a-4a47-abac-4fc4a562db06 req-39637e05-69a5-442a-8059-1bfa3ae3abc4 service nova] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Refreshing instance network info cache due to event network-changed-d0951916-cda6-44b6-b427-5e6db7f3910d. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 656.603928] env[61998]: DEBUG oslo_concurrency.lockutils [req-183f41f3-ad7a-4a47-abac-4fc4a562db06 req-39637e05-69a5-442a-8059-1bfa3ae3abc4 service nova] Acquiring lock "refresh_cache-dff2b45c-bf45-4b22-b78f-287019b483f2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.604273] env[61998]: DEBUG oslo_concurrency.lockutils [req-183f41f3-ad7a-4a47-abac-4fc4a562db06 req-39637e05-69a5-442a-8059-1bfa3ae3abc4 service nova] Acquired lock "refresh_cache-dff2b45c-bf45-4b22-b78f-287019b483f2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.604273] env[61998]: DEBUG nova.network.neutron [req-183f41f3-ad7a-4a47-abac-4fc4a562db06 req-39637e05-69a5-442a-8059-1bfa3ae3abc4 service nova] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Refreshing network info cache for port d0951916-cda6-44b6-b427-5e6db7f3910d {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 656.621020] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dff2b45c-bf45-4b22-b78f-287019b483f2 could not be found. [ 656.621289] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 656.621491] env[61998]: INFO nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 656.621735] env[61998]: DEBUG oslo.service.loopingcall [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 656.624765] env[61998]: DEBUG nova.compute.manager [-] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 656.624899] env[61998]: DEBUG nova.network.neutron [-] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 656.643049] env[61998]: DEBUG nova.network.neutron [-] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.754662] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1928886f-8e14-474d-9253-59b8039686cd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.765664] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7f0d7e-90cb-46d4-8056-7f70deb9f68b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.798933] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0023321-c7e5-4c46-9dca-054f93c210e9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.807339] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd9d7d8-007d-48a6-8c3a-272b0b019768 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.821801] env[61998]: DEBUG nova.compute.provider_tree [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.028873] env[61998]: DEBUG nova.network.neutron [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.124132] env[61998]: DEBUG nova.network.neutron [req-183f41f3-ad7a-4a47-abac-4fc4a562db06 req-39637e05-69a5-442a-8059-1bfa3ae3abc4 service nova] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.146221] env[61998]: DEBUG nova.network.neutron [-] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.324673] env[61998]: DEBUG nova.scheduler.client.report [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 657.339746] env[61998]: DEBUG nova.network.neutron [req-183f41f3-ad7a-4a47-abac-4fc4a562db06 req-39637e05-69a5-442a-8059-1bfa3ae3abc4 service nova] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.532099] env[61998]: INFO nova.compute.manager [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] [instance: 70af5d32-254f-4819-8cca-c28346e48139] Took 1.03 seconds to deallocate network for instance. [ 657.649230] env[61998]: INFO nova.compute.manager [-] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Took 1.02 seconds to deallocate network for instance. [ 657.651652] env[61998]: DEBUG nova.compute.claims [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 657.651826] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.830447] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.830967] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 657.833529] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.345s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.834945] env[61998]: INFO nova.compute.claims [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.842284] env[61998]: DEBUG oslo_concurrency.lockutils [req-183f41f3-ad7a-4a47-abac-4fc4a562db06 req-39637e05-69a5-442a-8059-1bfa3ae3abc4 service nova] Releasing lock "refresh_cache-dff2b45c-bf45-4b22-b78f-287019b483f2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.842284] env[61998]: DEBUG nova.compute.manager [req-183f41f3-ad7a-4a47-abac-4fc4a562db06 req-39637e05-69a5-442a-8059-1bfa3ae3abc4 service nova] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Received event network-vif-deleted-d0951916-cda6-44b6-b427-5e6db7f3910d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 658.339983] env[61998]: DEBUG nova.compute.utils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 658.344542] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 658.344542] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.388811] env[61998]: DEBUG nova.policy [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ffba8b1830244d091b15706a3f986d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad8f2951990c4b3b9059adcd4f0e65dd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 658.565071] env[61998]: INFO nova.scheduler.client.report [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Deleted allocations for instance 70af5d32-254f-4819-8cca-c28346e48139 [ 658.699204] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Successfully created port: 57f06003-81d6-47d4-b5bc-48618340fa59 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 658.846796] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 659.076367] env[61998]: DEBUG oslo_concurrency.lockutils [None req-71643155-6db5-4d2f-86e4-0ef8f904b56d tempest-DeleteServersAdminTestJSON-1949675843 tempest-DeleteServersAdminTestJSON-1949675843-project-member] Lock "70af5d32-254f-4819-8cca-c28346e48139" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.437s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.244386] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7e6631-8271-4dca-9be0-2d19880b9897 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.252974] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab84a33b-d08f-425d-8fef-a6c00626f852 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.284667] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4743cd-5ebe-4d41-909a-ad92eff60873 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.292135] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1692567-174f-4fb4-83e7-cce62a361bff {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.306022] env[61998]: DEBUG nova.compute.provider_tree [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.581018] env[61998]: DEBUG nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 659.599686] env[61998]: DEBUG nova.compute.manager [req-82bb1688-87a3-46e7-8813-3099772062d1 req-cb96c2e3-8c1f-4f4a-acf9-2e20a5a7e76b service nova] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Received event network-changed-57f06003-81d6-47d4-b5bc-48618340fa59 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 659.599972] env[61998]: DEBUG nova.compute.manager [req-82bb1688-87a3-46e7-8813-3099772062d1 req-cb96c2e3-8c1f-4f4a-acf9-2e20a5a7e76b service nova] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Refreshing instance network info cache due to event network-changed-57f06003-81d6-47d4-b5bc-48618340fa59. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 659.600207] env[61998]: DEBUG oslo_concurrency.lockutils [req-82bb1688-87a3-46e7-8813-3099772062d1 req-cb96c2e3-8c1f-4f4a-acf9-2e20a5a7e76b service nova] Acquiring lock "refresh_cache-6643cc70-7e92-41e9-b2dc-c531a331086f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.600356] env[61998]: DEBUG oslo_concurrency.lockutils [req-82bb1688-87a3-46e7-8813-3099772062d1 req-cb96c2e3-8c1f-4f4a-acf9-2e20a5a7e76b service nova] Acquired lock "refresh_cache-6643cc70-7e92-41e9-b2dc-c531a331086f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.600513] env[61998]: DEBUG nova.network.neutron [req-82bb1688-87a3-46e7-8813-3099772062d1 req-cb96c2e3-8c1f-4f4a-acf9-2e20a5a7e76b service nova] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Refreshing network info cache for port 57f06003-81d6-47d4-b5bc-48618340fa59 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 659.709434] env[61998]: ERROR nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 57f06003-81d6-47d4-b5bc-48618340fa59, please check neutron logs for more information. [ 659.709434] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 659.709434] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 659.709434] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 659.709434] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.709434] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 659.709434] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.709434] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 659.709434] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.709434] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 659.709434] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.709434] env[61998]: ERROR nova.compute.manager raise self.value [ 659.709434] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.709434] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 659.709434] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.709434] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 659.710016] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.710016] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 659.710016] env[61998]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 57f06003-81d6-47d4-b5bc-48618340fa59, please check neutron logs for more information. [ 659.710016] env[61998]: ERROR nova.compute.manager [ 659.710016] env[61998]: Traceback (most recent call last): [ 659.710016] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 659.710016] env[61998]: listener.cb(fileno) [ 659.710016] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 659.710016] env[61998]: result = function(*args, **kwargs) [ 659.710016] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 659.710016] env[61998]: return func(*args, **kwargs) [ 659.710016] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 659.710016] env[61998]: raise e [ 659.710016] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 659.710016] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 659.710016] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.710016] env[61998]: created_port_ids = self._update_ports_for_instance( [ 659.710016] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.710016] env[61998]: with excutils.save_and_reraise_exception(): [ 659.710016] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.710016] env[61998]: self.force_reraise() [ 659.710016] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.710016] env[61998]: raise self.value [ 659.710016] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.710016] env[61998]: updated_port = self._update_port( [ 659.710016] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.710016] env[61998]: _ensure_no_port_binding_failure(port) [ 659.710016] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.710016] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 659.711706] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 57f06003-81d6-47d4-b5bc-48618340fa59, please check neutron logs for more information. 
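
The PortBindingFailed raised in the tracebacks above originates in nova/network/neutron.py's _ensure_no_port_binding_failure (line 294 in this tree). As a hedged paraphrase of that helper, not its verbatim source: Neutron reports the outcome of port binding in the port's binding:vif_type attribute, and Nova raises as soon as it sees the 'binding_failed' sentinel, which is why the build aborts before any VM exists on the backend.

# Hedged paraphrase of the check that raised above
# (nova/network/neutron.py:294 in the tracebacks). Names follow the
# traceback; the body is a sketch rather than verbatim nova source.
from nova import exception


def _ensure_no_port_binding_failure(port):
    # Neutron records the binding outcome in 'binding:vif_type'; the
    # sentinel value 'binding_failed' means no mechanism driver could
    # bind the port on the target host, so Nova fails the build now
    # instead of spawning a VM with an unusable VIF.
    if port.get('binding:vif_type') == 'binding_failed':
        raise exception.PortBindingFailed(port_id=port['id'])
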
[ 659.711706] env[61998]: Removing descriptor: 15 [ 659.809648] env[61998]: DEBUG nova.scheduler.client.report [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 659.861115] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 659.888429] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 659.888609] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 659.888779] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.888980] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 659.889146] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.889297] 
env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 659.889502] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 659.889656] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 659.889822] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 659.890025] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 659.890205] env[61998]: DEBUG nova.virt.hardware [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 659.891077] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b4bcd0-e709-487b-97eb-53b3b40ee72b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.899061] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785a04db-52b3-437e-980c-c9a9ebedd248 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.912807] env[61998]: ERROR nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 57f06003-81d6-47d4-b5bc-48618340fa59, please check neutron logs for more information. 
[ 659.912807] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Traceback (most recent call last): [ 659.912807] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 659.912807] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] yield resources [ 659.912807] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 659.912807] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] self.driver.spawn(context, instance, image_meta, [ 659.912807] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 659.912807] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 659.912807] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 659.912807] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] vm_ref = self.build_virtual_machine(instance, [ 659.912807] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 659.913419] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] vif_infos = vmwarevif.get_vif_info(self._session, [ 659.913419] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 659.913419] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] for vif in network_info: [ 659.913419] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 659.913419] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] return self._sync_wrapper(fn, *args, **kwargs) [ 659.913419] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 659.913419] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] self.wait() [ 659.913419] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 659.913419] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] self[:] = self._gt.wait() [ 659.913419] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 659.913419] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] return self._exit_event.wait() [ 659.913419] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 659.913419] env[61998]: ERROR 
nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] current.throw(*self._exc) [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] result = function(*args, **kwargs) [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] return func(*args, **kwargs) [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] raise e [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] nwinfo = self.network_api.allocate_for_instance( [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] created_port_ids = self._update_ports_for_instance( [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] with excutils.save_and_reraise_exception(): [ 659.913946] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.914320] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] self.force_reraise() [ 659.914320] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.914320] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] raise self.value [ 659.914320] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.914320] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] updated_port = self._update_port( [ 659.914320] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.914320] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] _ensure_no_port_binding_failure(port) [ 659.914320] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
659.914320] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] raise exception.PortBindingFailed(port_id=port['id']) [ 659.914320] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] nova.exception.PortBindingFailed: Binding failed for port 57f06003-81d6-47d4-b5bc-48618340fa59, please check neutron logs for more information. [ 659.914320] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] [ 659.914320] env[61998]: INFO nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Terminating instance [ 659.914814] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "refresh_cache-6643cc70-7e92-41e9-b2dc-c531a331086f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.100586] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.118492] env[61998]: DEBUG nova.network.neutron [req-82bb1688-87a3-46e7-8813-3099772062d1 req-cb96c2e3-8c1f-4f4a-acf9-2e20a5a7e76b service nova] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.214056] env[61998]: DEBUG nova.network.neutron [req-82bb1688-87a3-46e7-8813-3099772062d1 req-cb96c2e3-8c1f-4f4a-acf9-2e20a5a7e76b service nova] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.316044] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.482s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.317029] env[61998]: DEBUG nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 660.320266] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.329s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.324099] env[61998]: INFO nova.compute.claims [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.716463] env[61998]: DEBUG oslo_concurrency.lockutils [req-82bb1688-87a3-46e7-8813-3099772062d1 req-cb96c2e3-8c1f-4f4a-acf9-2e20a5a7e76b service nova] Releasing lock "refresh_cache-6643cc70-7e92-41e9-b2dc-c531a331086f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.716801] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquired lock "refresh_cache-6643cc70-7e92-41e9-b2dc-c531a331086f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.717013] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 660.827560] env[61998]: DEBUG nova.compute.utils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 660.831127] env[61998]: DEBUG nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 660.831204] env[61998]: DEBUG nova.network.neutron [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 660.899216] env[61998]: DEBUG nova.policy [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '954e3c2d4be148199d9fc78297bb16a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8f9bcdfe6aa40e5a6dc0b39a1834891', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 661.185531] env[61998]: DEBUG nova.network.neutron [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Successfully created port: d14238cd-e084-4f82-bb48-1d9df1b5aae4 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 661.232482] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.326414] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.333833] env[61998]: DEBUG nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 661.631502] env[61998]: DEBUG nova.compute.manager [req-f4a6f53b-b7fa-4b94-8bed-a3587366567d req-1dfec221-1a5f-46fc-8368-154eac3ed441 service nova] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Received event network-vif-deleted-57f06003-81d6-47d4-b5bc-48618340fa59 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 661.745333] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b597c6e7-e953-4c31-9d3c-7f1be16dc4f3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.753788] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e297ae-0778-41cc-93ba-abb5536aa39d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.784811] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266b3ee4-fe85-4c05-b4ac-6517ecc5178e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.794233] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0230ada-0f91-41ba-8443-edc680cd76b6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.812189] env[61998]: DEBUG nova.compute.provider_tree [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.828030] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Releasing lock "refresh_cache-6643cc70-7e92-41e9-b2dc-c531a331086f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.829336] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 661.829336] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 661.829447] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-521f41af-4f04-4663-aa82-6032b2c20297 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.838977] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1123109-2d38-4556-86ae-6db88925fb6b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.868858] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6643cc70-7e92-41e9-b2dc-c531a331086f could not be found. [ 661.868858] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 661.869030] env[61998]: INFO nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 661.870081] env[61998]: DEBUG oslo.service.loopingcall [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 661.870081] env[61998]: DEBUG nova.compute.manager [-] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 661.870081] env[61998]: DEBUG nova.network.neutron [-] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 661.889675] env[61998]: DEBUG nova.network.neutron [-] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.292833] env[61998]: ERROR nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d14238cd-e084-4f82-bb48-1d9df1b5aae4, please check neutron logs for more information. 
[ 662.292833] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 662.292833] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 662.292833] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 662.292833] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.292833] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 662.292833] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.292833] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 662.292833] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.292833] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 662.292833] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.292833] env[61998]: ERROR nova.compute.manager raise self.value [ 662.292833] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.292833] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 662.292833] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.292833] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 662.293295] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 662.293295] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 662.293295] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d14238cd-e084-4f82-bb48-1d9df1b5aae4, please check neutron logs for more information. 
[ 662.293295] env[61998]: ERROR nova.compute.manager [ 662.293295] env[61998]: Traceback (most recent call last): [ 662.293295] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 662.293295] env[61998]: listener.cb(fileno) [ 662.293295] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 662.293295] env[61998]: result = function(*args, **kwargs) [ 662.293295] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 662.293295] env[61998]: return func(*args, **kwargs) [ 662.293295] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 662.293295] env[61998]: raise e [ 662.293295] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 662.293295] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 662.293295] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.293295] env[61998]: created_port_ids = self._update_ports_for_instance( [ 662.293295] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.293295] env[61998]: with excutils.save_and_reraise_exception(): [ 662.293295] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.293295] env[61998]: self.force_reraise() [ 662.293295] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.293295] env[61998]: raise self.value [ 662.293295] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.293295] env[61998]: updated_port = self._update_port( [ 662.293295] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.293295] env[61998]: _ensure_no_port_binding_failure(port) [ 662.293295] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 662.293295] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 662.294019] env[61998]: nova.exception.PortBindingFailed: Binding failed for port d14238cd-e084-4f82-bb48-1d9df1b5aae4, please check neutron logs for more information. [ 662.294019] env[61998]: Removing descriptor: 15 [ 662.316776] env[61998]: DEBUG nova.scheduler.client.report [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 662.357112] env[61998]: DEBUG nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 662.379784] env[61998]: DEBUG nova.virt.hardware [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 662.380046] env[61998]: DEBUG nova.virt.hardware [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 662.380235] env[61998]: DEBUG nova.virt.hardware [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.380440] env[61998]: DEBUG nova.virt.hardware [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 662.380584] env[61998]: DEBUG nova.virt.hardware [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.380811] env[61998]: DEBUG nova.virt.hardware [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 662.380931] env[61998]: DEBUG nova.virt.hardware [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 662.381088] env[61998]: DEBUG nova.virt.hardware [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 662.381242] env[61998]: DEBUG nova.virt.hardware [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 
tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 662.381396] env[61998]: DEBUG nova.virt.hardware [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 662.381578] env[61998]: DEBUG nova.virt.hardware [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.382482] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95787cc-03c4-4fbf-b416-133567d40a44 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.390318] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376ea457-0ccb-45af-a6e9-d679aa9831d0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.394142] env[61998]: DEBUG nova.network.neutron [-] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.405717] env[61998]: ERROR nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d14238cd-e084-4f82-bb48-1d9df1b5aae4, please check neutron logs for more information. 
[ 662.405717] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Traceback (most recent call last): [ 662.405717] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 662.405717] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] yield resources [ 662.405717] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 662.405717] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] self.driver.spawn(context, instance, image_meta, [ 662.405717] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 662.405717] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 662.405717] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 662.405717] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] vm_ref = self.build_virtual_machine(instance, [ 662.405717] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 662.406106] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] vif_infos = vmwarevif.get_vif_info(self._session, [ 662.406106] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 662.406106] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] for vif in network_info: [ 662.406106] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 662.406106] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] return self._sync_wrapper(fn, *args, **kwargs) [ 662.406106] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 662.406106] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] self.wait() [ 662.406106] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 662.406106] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] self[:] = self._gt.wait() [ 662.406106] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 662.406106] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] return self._exit_event.wait() [ 662.406106] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 662.406106] env[61998]: ERROR 
nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] current.throw(*self._exc) [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] result = function(*args, **kwargs) [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] return func(*args, **kwargs) [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] raise e [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] nwinfo = self.network_api.allocate_for_instance( [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] created_port_ids = self._update_ports_for_instance( [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] with excutils.save_and_reraise_exception(): [ 662.406476] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.406820] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] self.force_reraise() [ 662.406820] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.406820] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] raise self.value [ 662.406820] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.406820] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] updated_port = self._update_port( [ 662.406820] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.406820] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] _ensure_no_port_binding_failure(port) [ 662.406820] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
662.406820] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] raise exception.PortBindingFailed(port_id=port['id']) [ 662.406820] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] nova.exception.PortBindingFailed: Binding failed for port d14238cd-e084-4f82-bb48-1d9df1b5aae4, please check neutron logs for more information. [ 662.406820] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] [ 662.406820] env[61998]: INFO nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Terminating instance [ 662.407862] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquiring lock "refresh_cache-c6837b87-b01c-454c-b986-6f9fa57656bf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.408030] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquired lock "refresh_cache-c6837b87-b01c-454c-b986-6f9fa57656bf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.408195] env[61998]: DEBUG nova.network.neutron [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 662.821688] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.501s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.822250] env[61998]: DEBUG nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 662.824829] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 19.050s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.898019] env[61998]: INFO nova.compute.manager [-] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Took 1.03 seconds to deallocate network for instance. 
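
The "with excutils.save_and_reraise_exception():" frames recurring in the tracebacks above come from oslo.utils; they let _update_ports_for_instance run rollback work without losing the original exception. A minimal, self-contained usage sketch under that assumption (bind() and cleanup_ports() are hypothetical stand-ins, not nova code):

# Minimal usage sketch of oslo_utils.excutils.save_and_reraise_exception,
# the context manager visible in the tracebacks above.
from oslo_utils import excutils


def bind(port):
    # Hypothetical stand-in for nova's _update_port(); always fails here
    # to demonstrate the rollback-then-reraise flow.
    raise RuntimeError('binding failed for %s' % port)


def cleanup_ports(ports):
    # Hypothetical rollback helper.
    print('rolling back %d port(s)' % len(ports))


def update_ports(ports):
    try:
        for port in ports:
            bind(port)
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup runs first; on exit the original exception is
            # re-raised with its traceback intact (the force_reraise()
            # frames above). If cleanup itself raised, the saved
            # exception would be logged and the new one allowed to
            # propagate instead.
            cleanup_ports(ports)
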
[ 662.899837] env[61998]: DEBUG nova.compute.claims [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 662.900085] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.928641] env[61998]: DEBUG nova.network.neutron [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.077533] env[61998]: DEBUG nova.network.neutron [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.329137] env[61998]: DEBUG nova.compute.utils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 663.335065] env[61998]: DEBUG nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 663.335065] env[61998]: DEBUG nova.network.neutron [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 663.378024] env[61998]: DEBUG nova.policy [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '954e3c2d4be148199d9fc78297bb16a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8f9bcdfe6aa40e5a6dc0b39a1834891', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 663.579422] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Releasing lock "refresh_cache-c6837b87-b01c-454c-b986-6f9fa57656bf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.580730] env[61998]: DEBUG nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 663.580730] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 663.580730] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30f3a478-cb7f-4140-b9a3-f974a7d3ab34 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.590134] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-befa72ef-1baf-4c97-9a64-c3de8ca116f8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.614259] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c6837b87-b01c-454c-b986-6f9fa57656bf could not be found. 
[ 663.614485] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 663.614662] env[61998]: INFO nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Took 0.03 seconds to destroy the instance on the hypervisor. [ 663.614902] env[61998]: DEBUG oslo.service.loopingcall [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 663.615230] env[61998]: DEBUG nova.compute.manager [-] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 663.615326] env[61998]: DEBUG nova.network.neutron [-] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 663.630699] env[61998]: DEBUG nova.network.neutron [-] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.661633] env[61998]: DEBUG nova.compute.manager [req-baa0287a-1181-4881-8b42-729b1af05ca3 req-1f798760-d0aa-4614-8b4c-54efd2650c49 service nova] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Received event network-changed-d14238cd-e084-4f82-bb48-1d9df1b5aae4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 663.661826] env[61998]: DEBUG nova.compute.manager [req-baa0287a-1181-4881-8b42-729b1af05ca3 req-1f798760-d0aa-4614-8b4c-54efd2650c49 service nova] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Refreshing instance network info cache due to event network-changed-d14238cd-e084-4f82-bb48-1d9df1b5aae4.
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 663.662044] env[61998]: DEBUG oslo_concurrency.lockutils [req-baa0287a-1181-4881-8b42-729b1af05ca3 req-1f798760-d0aa-4614-8b4c-54efd2650c49 service nova] Acquiring lock "refresh_cache-c6837b87-b01c-454c-b986-6f9fa57656bf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.662188] env[61998]: DEBUG oslo_concurrency.lockutils [req-baa0287a-1181-4881-8b42-729b1af05ca3 req-1f798760-d0aa-4614-8b4c-54efd2650c49 service nova] Acquired lock "refresh_cache-c6837b87-b01c-454c-b986-6f9fa57656bf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.662341] env[61998]: DEBUG nova.network.neutron [req-baa0287a-1181-4881-8b42-729b1af05ca3 req-1f798760-d0aa-4614-8b4c-54efd2650c49 service nova] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Refreshing network info cache for port d14238cd-e084-4f82-bb48-1d9df1b5aae4 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 663.694037] env[61998]: DEBUG nova.network.neutron [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Successfully created port: fbbca0d9-b5fc-44c7-a41f-523cf7db5a95 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 663.835837] env[61998]: DEBUG nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 663.866291] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 8672c282-1a66-49b5-9c22-7136b567a52c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 663.866291] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 1a91b0c5-d852-424a-b576-3d2c76860b06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 663.866291] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 0d680f38-bd47-4aeb-8845-efa20667623b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 663.866291] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance e730a03f-64c0-4e94-bc66-d3006be8b3ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 663.866499] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance dff2b45c-bf45-4b22-b78f-287019b483f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 663.866499] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 6643cc70-7e92-41e9-b2dc-c531a331086f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 663.866655] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance c6837b87-b01c-454c-b986-6f9fa57656bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 663.866712] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 664.133685] env[61998]: DEBUG nova.network.neutron [-] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.182945] env[61998]: DEBUG nova.network.neutron [req-baa0287a-1181-4881-8b42-729b1af05ca3 req-1f798760-d0aa-4614-8b4c-54efd2650c49 service nova] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.347116] env[61998]: DEBUG nova.network.neutron [req-baa0287a-1181-4881-8b42-729b1af05ca3 req-1f798760-d0aa-4614-8b4c-54efd2650c49 service nova] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.369550] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 297c345a-a825-47b1-a9e4-a353758d32ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.639937] env[61998]: INFO nova.compute.manager [-] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Took 1.02 seconds to deallocate network for instance. 
[ 664.641095] env[61998]: DEBUG nova.compute.claims [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 664.641095] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.851487] env[61998]: DEBUG nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 664.852738] env[61998]: DEBUG oslo_concurrency.lockutils [req-baa0287a-1181-4881-8b42-729b1af05ca3 req-1f798760-d0aa-4614-8b4c-54efd2650c49 service nova] Releasing lock "refresh_cache-c6837b87-b01c-454c-b986-6f9fa57656bf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.853686] env[61998]: DEBUG nova.compute.manager [req-baa0287a-1181-4881-8b42-729b1af05ca3 req-1f798760-d0aa-4614-8b4c-54efd2650c49 service nova] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Received event network-vif-deleted-d14238cd-e084-4f82-bb48-1d9df1b5aae4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 664.872703] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 35b6490b-eec9-4dc1-9de3-63c368bdc5d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.879054] env[61998]: ERROR nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fbbca0d9-b5fc-44c7-a41f-523cf7db5a95, please check neutron logs for more information. 
[ 664.879054] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 664.879054] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 664.879054] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 664.879054] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 664.879054] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 664.879054] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 664.879054] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 664.879054] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.879054] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 664.879054] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.879054] env[61998]: ERROR nova.compute.manager raise self.value [ 664.879054] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 664.879054] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 664.879054] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.879054] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 664.879513] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 664.879513] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 664.879513] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fbbca0d9-b5fc-44c7-a41f-523cf7db5a95, please check neutron logs for more information. 
[ 664.879513] env[61998]: ERROR nova.compute.manager [ 664.879513] env[61998]: Traceback (most recent call last): [ 664.879513] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 664.879513] env[61998]: listener.cb(fileno) [ 664.879513] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 664.879513] env[61998]: result = function(*args, **kwargs) [ 664.879513] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 664.879513] env[61998]: return func(*args, **kwargs) [ 664.879513] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 664.879513] env[61998]: raise e [ 664.879513] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 664.879513] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 664.879513] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 664.879513] env[61998]: created_port_ids = self._update_ports_for_instance( [ 664.879513] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 664.879513] env[61998]: with excutils.save_and_reraise_exception(): [ 664.879513] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.879513] env[61998]: self.force_reraise() [ 664.879513] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.879513] env[61998]: raise self.value [ 664.879513] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 664.879513] env[61998]: updated_port = self._update_port( [ 664.879513] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.879513] env[61998]: _ensure_no_port_binding_failure(port) [ 664.879513] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 664.879513] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 664.880299] env[61998]: nova.exception.PortBindingFailed: Binding failed for port fbbca0d9-b5fc-44c7-a41f-523cf7db5a95, please check neutron logs for more information. 
[ 664.880299] env[61998]: Removing descriptor: 15 [ 664.884404] env[61998]: DEBUG nova.virt.hardware [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 664.884534] env[61998]: DEBUG nova.virt.hardware [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 664.885749] env[61998]: DEBUG nova.virt.hardware [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 664.885749] env[61998]: DEBUG nova.virt.hardware [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 664.885749] env[61998]: DEBUG nova.virt.hardware [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 664.885749] env[61998]: DEBUG nova.virt.hardware [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 664.885749] env[61998]: DEBUG nova.virt.hardware [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 664.886395] env[61998]: DEBUG nova.virt.hardware [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 664.886395] env[61998]: DEBUG nova.virt.hardware [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511
tempest-ServersAdminTestJSON-252421511-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 664.886395] env[61998]: DEBUG nova.virt.hardware [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 664.886395] env[61998]: DEBUG nova.virt.hardware [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 664.887516] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800a5623-3119-4db4-a13b-7e4898df8954 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.895661] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92884055-0379-4e6f-b86b-e4cc9d26d5ee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.911022] env[61998]: ERROR nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fbbca0d9-b5fc-44c7-a41f-523cf7db5a95, please check neutron logs for more information. 
[ 664.911022] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Traceback (most recent call last): [ 664.911022] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 664.911022] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] yield resources [ 664.911022] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 664.911022] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] self.driver.spawn(context, instance, image_meta, [ 664.911022] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 664.911022] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 664.911022] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 664.911022] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] vm_ref = self.build_virtual_machine(instance, [ 664.911022] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 664.911384] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] vif_infos = vmwarevif.get_vif_info(self._session, [ 664.911384] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 664.911384] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] for vif in network_info: [ 664.911384] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 664.911384] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] return self._sync_wrapper(fn, *args, **kwargs) [ 664.911384] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 664.911384] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] self.wait() [ 664.911384] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 664.911384] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] self[:] = self._gt.wait() [ 664.911384] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 664.911384] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] return self._exit_event.wait() [ 664.911384] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 664.911384] env[61998]: ERROR 
nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] current.throw(*self._exc) [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] result = function(*args, **kwargs) [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] return func(*args, **kwargs) [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] raise e [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] nwinfo = self.network_api.allocate_for_instance( [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] created_port_ids = self._update_ports_for_instance( [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] with excutils.save_and_reraise_exception(): [ 664.911770] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.912131] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] self.force_reraise() [ 664.912131] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.912131] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] raise self.value [ 664.912131] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 664.912131] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] updated_port = self._update_port( [ 664.912131] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.912131] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] _ensure_no_port_binding_failure(port) [ 664.912131] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
664.912131] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] raise exception.PortBindingFailed(port_id=port['id']) [ 664.912131] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] nova.exception.PortBindingFailed: Binding failed for port fbbca0d9-b5fc-44c7-a41f-523cf7db5a95, please check neutron logs for more information. [ 664.912131] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] [ 664.912131] env[61998]: INFO nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Terminating instance [ 664.913577] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquiring lock "refresh_cache-41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.913659] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquired lock "refresh_cache-41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.913775] env[61998]: DEBUG nova.network.neutron [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 665.379288] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 9da95edb-f9fb-40f3-9317-d27f1bae0ecf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.437843] env[61998]: DEBUG nova.network.neutron [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 665.567729] env[61998]: DEBUG nova.network.neutron [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.728866] env[61998]: DEBUG nova.compute.manager [req-801f8bd7-cbdc-4e86-88b7-2c3b9852ac6b req-bb6a6688-2ceb-464e-9fca-efce09263c86 service nova] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Received event network-changed-fbbca0d9-b5fc-44c7-a41f-523cf7db5a95 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 665.729177] env[61998]: DEBUG nova.compute.manager [req-801f8bd7-cbdc-4e86-88b7-2c3b9852ac6b req-bb6a6688-2ceb-464e-9fca-efce09263c86 service nova] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Refreshing instance network info cache due to event network-changed-fbbca0d9-b5fc-44c7-a41f-523cf7db5a95. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 665.729398] env[61998]: DEBUG oslo_concurrency.lockutils [req-801f8bd7-cbdc-4e86-88b7-2c3b9852ac6b req-bb6a6688-2ceb-464e-9fca-efce09263c86 service nova] Acquiring lock "refresh_cache-41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.884922] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance a8101e8d-55d0-4f70-9119-f5e176ba8212 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 666.070764] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Releasing lock "refresh_cache-41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.074156] env[61998]: DEBUG nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 666.074156] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 666.074156] env[61998]: DEBUG oslo_concurrency.lockutils [req-801f8bd7-cbdc-4e86-88b7-2c3b9852ac6b req-bb6a6688-2ceb-464e-9fca-efce09263c86 service nova] Acquired lock "refresh_cache-41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.074156] env[61998]: DEBUG nova.network.neutron [req-801f8bd7-cbdc-4e86-88b7-2c3b9852ac6b req-bb6a6688-2ceb-464e-9fca-efce09263c86 service nova] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Refreshing network info cache for port fbbca0d9-b5fc-44c7-a41f-523cf7db5a95 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 666.074156] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91df221a-34a7-423b-b218-0fd064047b5b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.085490] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ca24c2-dd2f-4061-9de3-7f4ee25da0a6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.110118] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1 could not be found. [ 666.110510] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 666.110801] env[61998]: INFO nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 666.111479] env[61998]: DEBUG oslo.service.loopingcall [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 666.111813] env[61998]: DEBUG nova.compute.manager [-] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 666.112033] env[61998]: DEBUG nova.network.neutron [-] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 666.138523] env[61998]: DEBUG nova.network.neutron [-] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.388037] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance f0a011bb-4939-4384-885c-6ce482875b4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 666.593419] env[61998]: DEBUG nova.network.neutron [req-801f8bd7-cbdc-4e86-88b7-2c3b9852ac6b req-bb6a6688-2ceb-464e-9fca-efce09263c86 service nova] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.640085] env[61998]: DEBUG nova.network.neutron [-] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.733936] env[61998]: DEBUG nova.network.neutron [req-801f8bd7-cbdc-4e86-88b7-2c3b9852ac6b req-bb6a6688-2ceb-464e-9fca-efce09263c86 service nova] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.891364] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 667.146235] env[61998]: INFO nova.compute.manager [-] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Took 1.03 seconds to deallocate network for instance. 
[ 667.152614] env[61998]: DEBUG nova.compute.claims [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 667.152614] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.162505] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquiring lock "bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.162659] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Lock "bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.207568] env[61998]: DEBUG oslo_concurrency.lockutils [req-801f8bd7-cbdc-4e86-88b7-2c3b9852ac6b req-bb6a6688-2ceb-464e-9fca-efce09263c86 service nova] Releasing lock "refresh_cache-41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.207854] env[61998]: DEBUG nova.compute.manager [req-801f8bd7-cbdc-4e86-88b7-2c3b9852ac6b req-bb6a6688-2ceb-464e-9fca-efce09263c86 service nova] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Received event network-vif-deleted-fbbca0d9-b5fc-44c7-a41f-523cf7db5a95 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 667.394043] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance f163fb1b-400f-4abb-8df6-0d9ea6449166 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 667.649420] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquiring lock "ad1084f8-a0c9-4783-af2d-aa677116a451" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.649691] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Lock "ad1084f8-a0c9-4783-af2d-aa677116a451" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.898618] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 9025d114-10da-4cf8-9e5f-2520bfd3b246 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 668.404137] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 668.907319] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 5f205b7d-d93e-436d-9d7d-04c6f767f7ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 669.410976] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance dce49aac-03f3-48ed-9bad-c5eb2d779bae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 669.914513] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 87f859c6-7a96-4a48-adb8-814a134ad4c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 670.420027] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance bc1ef57d-457d-446a-8ad4-3bab6d331215 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 670.922672] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance a733a167-9713-43b7-bcc0-b0af47879ffc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 671.426214] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 56e74975-e4fa-4ff8-ab87-aa74125dab78 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 671.930889] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance e632307a-ffe9-45a6-9224-8598aea5d269 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 672.434096] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance b9ec575c-034b-46bc-afbd-7a8a07a8e005 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 672.939064] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance e49e77d6-8d5e-4d89-b129-ac34cd1969c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 673.441473] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 3f2ed06f-27d0-4a73-a678-430db5334147 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 673.944894] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance f5c91aad-0f8a-4ad3-8566-7f36ff983575 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 673.945255] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 673.945378] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 674.280478] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1926d65b-f91e-4ba4-9849-4fbb36eca8ef {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.288130] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75eb02ce-a4c8-4552-893c-c97dced8b510 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.317600] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2e2d33-f006-405f-b85b-8085fa13f8a3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.324437] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6243523-60cc-495a-89ab-8cc39a0993ae {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.337174] env[61998]: DEBUG nova.compute.provider_tree [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.839935] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 675.344971] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61998) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 675.345369] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.520s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.345532] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.121s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.170511] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe13d51-baf8-4931-ac54-9e67a871ec8a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.177773] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd62c301-6b2b-4e9f-b68a-9ac214497d94 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.208188] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acfe55d-5223-4816-bcff-efcc8981a474 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.215545] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9103245-0d38-4a4f-b9e5-048d058a7466 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.228799] env[61998]: DEBUG nova.compute.provider_tree [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.731892] env[61998]: DEBUG nova.scheduler.client.report [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 677.238249] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.893s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.238894] env[61998]: ERROR 
nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port af39bae6-3885-49eb-ba2c-1a564b5a9f59, please check neutron logs for more information. [ 677.238894] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Traceback (most recent call last): [ 677.238894] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 677.238894] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] self.driver.spawn(context, instance, image_meta, [ 677.238894] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 677.238894] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 677.238894] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 677.238894] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] vm_ref = self.build_virtual_machine(instance, [ 677.238894] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 677.238894] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] vif_infos = vmwarevif.get_vif_info(self._session, [ 677.238894] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] for vif in network_info: [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] return self._sync_wrapper(fn, *args, **kwargs) [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] self.wait() [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] self[:] = self._gt.wait() [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] return self._exit_event.wait() [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] current.throw(*self._exc) [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 677.239435] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] result = function(*args, **kwargs) [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] return func(*args, **kwargs) [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] raise e [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] nwinfo = self.network_api.allocate_for_instance( [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] created_port_ids = self._update_ports_for_instance( [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] with excutils.save_and_reraise_exception(): [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] self.force_reraise() [ 677.239977] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.240562] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] raise self.value [ 677.240562] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 677.240562] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] updated_port = self._update_port( [ 677.240562] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.240562] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] _ensure_no_port_binding_failure(port) [ 677.240562] env[61998]: ERROR nova.compute.manager [instance: 
8672c282-1a66-49b5-9c22-7136b567a52c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 677.240562] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] raise exception.PortBindingFailed(port_id=port['id']) [ 677.240562] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] nova.exception.PortBindingFailed: Binding failed for port af39bae6-3885-49eb-ba2c-1a564b5a9f59, please check neutron logs for more information. [ 677.240562] env[61998]: ERROR nova.compute.manager [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] [ 677.240562] env[61998]: DEBUG nova.compute.utils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Binding failed for port af39bae6-3885-49eb-ba2c-1a564b5a9f59, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 677.241452] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.167s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.243663] env[61998]: DEBUG nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Build of instance 8672c282-1a66-49b5-9c22-7136b567a52c was re-scheduled: Binding failed for port af39bae6-3885-49eb-ba2c-1a564b5a9f59, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 677.244086] env[61998]: DEBUG nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 677.244320] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquiring lock "refresh_cache-8672c282-1a66-49b5-9c22-7136b567a52c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.244456] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Acquired lock "refresh_cache-8672c282-1a66-49b5-9c22-7136b567a52c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.244673] env[61998]: DEBUG nova.network.neutron [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 677.765050] env[61998]: DEBUG nova.network.neutron [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.840055] env[61998]: DEBUG nova.network.neutron [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.087675] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e632fbd4-59ca-4056-a65e-91fd9693dfc5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.095256] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db1b0f4-0234-440c-940e-3ab15ef0b541 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.125179] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d3cbbd-956e-4d44-a865-f96ced355f93 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.132898] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4d2cb7-b6ee-4203-a02b-e2d6298ec836 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.147425] env[61998]: DEBUG nova.compute.provider_tree [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.345148] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Releasing lock "refresh_cache-8672c282-1a66-49b5-9c22-7136b567a52c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.345401] env[61998]: DEBUG nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 678.345570] env[61998]: DEBUG nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 678.345741] env[61998]: DEBUG nova.network.neutron [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 678.361210] env[61998]: DEBUG nova.network.neutron [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.653344] env[61998]: DEBUG nova.scheduler.client.report [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 678.863610] env[61998]: DEBUG nova.network.neutron [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.158707] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.918s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.159427] env[61998]: ERROR nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 00fab4e9-12f4-49c9-a91b-9797f7c881c4, please check neutron logs for more information. 
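Every build failure in this run bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294 in the tracebacks). As a reading aid, here is a minimal sketch of that guard, assuming the upstream convention that Neutron flags a failed binding by setting the port's binding:vif_type to 'binding_failed'; the exception class is stubbed so the snippet stands alone:

    # Sketch of the check at the bottom of the tracebacks above (assumed to
    # mirror the nova.network.neutron helper of the same name, not copied
    # from this log).
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        # stand-in for nova.exception.PortBindingFailed
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # 'port' is the dict Neutron returns from the update_port call made
        # in _update_port (nova/network/neutron.py:585 in the tracebacks).
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

The raise propagates out of _update_ports_for_instance through save_and_reraise_exception, which is why force_reraise and "raise self.value" appear in every traceback before the final PortBindingFailed line.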
[ 679.159427] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Traceback (most recent call last): [ 679.159427] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 679.159427] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] self.driver.spawn(context, instance, image_meta, [ 679.159427] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 679.159427] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] self._vmops.spawn(context, instance, image_meta, injected_files, [ 679.159427] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 679.159427] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] vm_ref = self.build_virtual_machine(instance, [ 679.159427] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 679.159427] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] vif_infos = vmwarevif.get_vif_info(self._session, [ 679.159427] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] for vif in network_info: [ 679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] return self._sync_wrapper(fn, *args, **kwargs) [ 679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] self.wait() [ 679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] self[:] = self._gt.wait() [ 679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] return self._exit_event.wait() [ 679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] current.throw(*self._exc) [ 679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
679.159758] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] result = function(*args, **kwargs) [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] return func(*args, **kwargs) [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] raise e [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] nwinfo = self.network_api.allocate_for_instance( [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] created_port_ids = self._update_ports_for_instance( [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] with excutils.save_and_reraise_exception(): [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] self.force_reraise() [ 679.160101] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.160463] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] raise self.value [ 679.160463] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 679.160463] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] updated_port = self._update_port( [ 679.160463] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.160463] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] _ensure_no_port_binding_failure(port) [ 679.160463] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 679.160463] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] raise exception.PortBindingFailed(port_id=port['id']) [ 679.160463] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] nova.exception.PortBindingFailed: Binding failed for 
port 00fab4e9-12f4-49c9-a91b-9797f7c881c4, please check neutron logs for more information. [ 679.160463] env[61998]: ERROR nova.compute.manager [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] [ 679.160463] env[61998]: DEBUG nova.compute.utils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Binding failed for port 00fab4e9-12f4-49c9-a91b-9797f7c881c4, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 679.161352] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.637s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.164105] env[61998]: DEBUG nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Build of instance 1a91b0c5-d852-424a-b576-3d2c76860b06 was re-scheduled: Binding failed for port 00fab4e9-12f4-49c9-a91b-9797f7c881c4, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 679.164509] env[61998]: DEBUG nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 679.164732] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquiring lock "refresh_cache-1a91b0c5-d852-424a-b576-3d2c76860b06" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.164878] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquired lock "refresh_cache-1a91b0c5-d852-424a-b576-3d2c76860b06" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.165045] env[61998]: DEBUG nova.network.neutron [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.367035] env[61998]: INFO nova.compute.manager [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] [instance: 8672c282-1a66-49b5-9c22-7136b567a52c] Took 1.02 seconds to deallocate network for instance. 
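The "Acquiring lock ... by ...", "acquired ... waited Ns" and "released ... held Ns" triplets that bracket each claim above are emitted by oslo.concurrency's synchronized decorator; the 27-29 s waits show the tempest builds queueing behind the single per-process "compute_resources" semaphore. A minimal sketch of the pattern, assuming the stock lockutils API:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance):
        # Only one greenthread in this process holds "compute_resources" at
        # a time; entry and exit produce the acquired/waited and
        # released/held DEBUG lines seen throughout this log.
        ...

Anything decorated this way (instance_claim, abort_instance_claim, _update_available_resource) serializes against the rest, so the 12.520 s _update_available_resource pass logged earlier directly inflates the waited times reported by the claims queued behind it.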
[ 679.685377] env[61998]: DEBUG nova.network.neutron [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.735503] env[61998]: DEBUG nova.network.neutron [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.012984] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5cd1fb0-e8af-469f-b1cd-6a6f0c491e4f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.020592] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56624eab-2a81-43b4-86a7-1615545b84ea {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.053204] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9ac0f4-59e9-4505-b840-f93f56f91338 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.061024] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44184966-ed5c-405c-bf26-ec1fef1bdefd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.075748] env[61998]: DEBUG nova.compute.provider_tree [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.239578] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Releasing lock "refresh_cache-1a91b0c5-d852-424a-b576-3d2c76860b06" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.239578] env[61998]: DEBUG nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 680.239578] env[61998]: DEBUG nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 680.239578] env[61998]: DEBUG nova.network.neutron [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 680.255308] env[61998]: DEBUG nova.network.neutron [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.405841] env[61998]: INFO nova.scheduler.client.report [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Deleted allocations for instance 8672c282-1a66-49b5-9c22-7136b567a52c [ 680.578474] env[61998]: DEBUG nova.scheduler.client.report [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 680.758644] env[61998]: DEBUG nova.network.neutron [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.914874] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e2054352-1bc4-484a-b0e1-e96ba6c2a080 tempest-MigrationsAdminTest-2033248062 tempest-MigrationsAdminTest-2033248062-project-member] Lock "8672c282-1a66-49b5-9c22-7136b567a52c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.881s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.083070] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.922s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.083701] 
env[61998]: ERROR nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d0d9f3dd-2904-460e-8c8d-0fead5382ec2, please check neutron logs for more information. [ 681.083701] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Traceback (most recent call last): [ 681.083701] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 681.083701] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] self.driver.spawn(context, instance, image_meta, [ 681.083701] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 681.083701] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 681.083701] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 681.083701] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] vm_ref = self.build_virtual_machine(instance, [ 681.083701] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 681.083701] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] vif_infos = vmwarevif.get_vif_info(self._session, [ 681.083701] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] for vif in network_info: [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] return self._sync_wrapper(fn, *args, **kwargs) [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] self.wait() [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] self[:] = self._gt.wait() [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] return self._exit_event.wait() [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] current.throw(*self._exc) [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.084079] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] result = function(*args, **kwargs) [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] return func(*args, **kwargs) [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] raise e [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] nwinfo = self.network_api.allocate_for_instance( [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] created_port_ids = self._update_ports_for_instance( [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] with excutils.save_and_reraise_exception(): [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] self.force_reraise() [ 681.084587] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.085039] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] raise self.value [ 681.085039] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 681.085039] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] updated_port = self._update_port( [ 681.085039] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.085039] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] _ensure_no_port_binding_failure(port) [ 681.085039] env[61998]: ERROR nova.compute.manager [instance: 
0d680f38-bd47-4aeb-8845-efa20667623b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.085039] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] raise exception.PortBindingFailed(port_id=port['id']) [ 681.085039] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] nova.exception.PortBindingFailed: Binding failed for port d0d9f3dd-2904-460e-8c8d-0fead5382ec2, please check neutron logs for more information. [ 681.085039] env[61998]: ERROR nova.compute.manager [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] [ 681.085039] env[61998]: DEBUG nova.compute.utils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Binding failed for port d0d9f3dd-2904-460e-8c8d-0fead5382ec2, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 681.085753] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.015s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.087188] env[61998]: INFO nova.compute.claims [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 681.089795] env[61998]: DEBUG nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Build of instance 0d680f38-bd47-4aeb-8845-efa20667623b was re-scheduled: Binding failed for port d0d9f3dd-2904-460e-8c8d-0fead5382ec2, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 681.090258] env[61998]: DEBUG nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 681.090486] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquiring lock "refresh_cache-0d680f38-bd47-4aeb-8845-efa20667623b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.090627] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Acquired lock "refresh_cache-0d680f38-bd47-4aeb-8845-efa20667623b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.090782] env[61998]: DEBUG nova.network.neutron [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 681.264155] env[61998]: INFO nova.compute.manager [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 1a91b0c5-d852-424a-b576-3d2c76860b06] Took 1.03 seconds to deallocate network for instance. [ 681.419758] env[61998]: DEBUG nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 681.615449] env[61998]: DEBUG nova.network.neutron [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.684444] env[61998]: DEBUG nova.network.neutron [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.941615] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.187707] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Releasing lock "refresh_cache-0d680f38-bd47-4aeb-8845-efa20667623b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.187992] env[61998]: DEBUG nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 682.188226] env[61998]: DEBUG nova.compute.manager [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 682.188441] env[61998]: DEBUG nova.network.neutron [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 682.208359] env[61998]: DEBUG nova.network.neutron [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.309106] env[61998]: INFO nova.scheduler.client.report [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Deleted allocations for instance 1a91b0c5-d852-424a-b576-3d2c76860b06 [ 682.508864] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27968748-6cbf-4be7-aa04-43f7040ee0c7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.516970] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfc3cff-9d8c-4963-ab4b-106b9eb40fd5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.547507] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2fb339-70e5-4f5e-be0d-3175cdb53dde {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.555710] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62109123-d36e-486a-8cdc-f74b4ca92cdb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.568919] env[61998]: DEBUG nova.compute.provider_tree [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.711319] env[61998]: DEBUG nova.network.neutron [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.821102] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e5cdd16-fa2c-4569-a935-0afe9442561c tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Lock "1a91b0c5-d852-424a-b576-3d2c76860b06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.387s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.071982] env[61998]: DEBUG nova.scheduler.client.report [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 683.214292] env[61998]: INFO nova.compute.manager [None 
req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] [instance: 0d680f38-bd47-4aeb-8845-efa20667623b] Took 1.03 seconds to deallocate network for instance. [ 683.324069] env[61998]: DEBUG nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 683.580149] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.491s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.580149] env[61998]: DEBUG nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 683.581192] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.682s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.847588] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.085730] env[61998]: DEBUG nova.compute.utils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 684.087186] env[61998]: DEBUG nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 684.087371] env[61998]: DEBUG nova.network.neutron [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 684.135695] env[61998]: DEBUG nova.policy [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8b17f109d724201a22264aa6ee02ca1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82b8854f80cf48628167fd6f678d7dd7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 684.245175] env[61998]: INFO nova.scheduler.client.report [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Deleted allocations for instance 0d680f38-bd47-4aeb-8845-efa20667623b [ 684.535352] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99ca9ad-feee-46dd-8688-754c1d50dbd3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.544028] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d7cdca-e6f9-4756-afe4-2d794cf31cc0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.583023] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f72a87-43a6-49bd-8531-f6781b8e85f5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.590879] env[61998]: DEBUG nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 684.595849] env[61998]: DEBUG nova.network.neutron [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Successfully created port: 551ad2d7-873a-4457-8669-12821e3e9793 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 684.598720] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66031712-6e7f-48c3-9935-5613d00f0a8e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.614647] env[61998]: DEBUG nova.compute.provider_tree [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.759502] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dc1300a6-1463-4f59-96ae-b139e1b815e9 tempest-ListImageFiltersTestJSON-593288268 tempest-ListImageFiltersTestJSON-593288268-project-member] Lock "0d680f38-bd47-4aeb-8845-efa20667623b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 122.525s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.122881] env[61998]: DEBUG nova.scheduler.client.report [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 685.265038] env[61998]: DEBUG nova.compute.manager [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 685.516356] env[61998]: DEBUG nova.compute.manager [req-902240d9-b7a0-4ddb-9430-4c5e830acb23 req-c9b50825-d185-402a-a39b-deeb1eb71878 service nova] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Received event network-changed-551ad2d7-873a-4457-8669-12821e3e9793 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 685.516653] env[61998]: DEBUG nova.compute.manager [req-902240d9-b7a0-4ddb-9430-4c5e830acb23 req-c9b50825-d185-402a-a39b-deeb1eb71878 service nova] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Refreshing instance network info cache due to event network-changed-551ad2d7-873a-4457-8669-12821e3e9793.
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 685.516906] env[61998]: DEBUG oslo_concurrency.lockutils [req-902240d9-b7a0-4ddb-9430-4c5e830acb23 req-c9b50825-d185-402a-a39b-deeb1eb71878 service nova] Acquiring lock "refresh_cache-297c345a-a825-47b1-a9e4-a353758d32ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.517182] env[61998]: DEBUG oslo_concurrency.lockutils [req-902240d9-b7a0-4ddb-9430-4c5e830acb23 req-c9b50825-d185-402a-a39b-deeb1eb71878 service nova] Acquired lock "refresh_cache-297c345a-a825-47b1-a9e4-a353758d32ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.519073] env[61998]: DEBUG nova.network.neutron [req-902240d9-b7a0-4ddb-9430-4c5e830acb23 req-c9b50825-d185-402a-a39b-deeb1eb71878 service nova] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Refreshing network info cache for port 551ad2d7-873a-4457-8669-12821e3e9793 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.609599] env[61998]: DEBUG nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 685.628305] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.047s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.628945] env[61998]: ERROR nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f224758a-f6d5-4302-9abb-9f5d2ae94568, please check neutron logs for more information. 
[ 685.628945] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Traceback (most recent call last):
[ 685.628945] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 685.628945] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] self.driver.spawn(context, instance, image_meta,
[ 685.628945] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 685.628945] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 685.628945] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 685.628945] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] vm_ref = self.build_virtual_machine(instance,
[ 685.628945] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 685.628945] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] vif_infos = vmwarevif.get_vif_info(self._session,
[ 685.628945] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] for vif in network_info:
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] return self._sync_wrapper(fn, *args, **kwargs)
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] self.wait()
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] self[:] = self._gt.wait()
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] return self._exit_event.wait()
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] current.throw(*self._exc)
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 685.629590] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] result = function(*args, **kwargs)
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] return func(*args, **kwargs)
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] raise e
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] nwinfo = self.network_api.allocate_for_instance(
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] created_port_ids = self._update_ports_for_instance(
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] with excutils.save_and_reraise_exception():
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] self.force_reraise()
[ 685.631831] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 685.632239] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] raise self.value
[ 685.632239] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 685.632239] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] updated_port = self._update_port(
[ 685.632239] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 685.632239] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] _ensure_no_port_binding_failure(port)
[ 685.632239] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 685.632239] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] raise exception.PortBindingFailed(port_id=port['id'])
[ 685.632239] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] nova.exception.PortBindingFailed: Binding failed for
port f224758a-f6d5-4302-9abb-9f5d2ae94568, please check neutron logs for more information. [ 685.632239] env[61998]: ERROR nova.compute.manager [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] [ 685.632239] env[61998]: DEBUG nova.compute.utils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Binding failed for port f224758a-f6d5-4302-9abb-9f5d2ae94568, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 685.632503] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.979s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.641854] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Build of instance e730a03f-64c0-4e94-bc66-d3006be8b3ca was re-scheduled: Binding failed for port f224758a-f6d5-4302-9abb-9f5d2ae94568, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 685.642359] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 685.642584] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "refresh_cache-e730a03f-64c0-4e94-bc66-d3006be8b3ca" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.642724] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquired lock "refresh_cache-e730a03f-64c0-4e94-bc66-d3006be8b3ca" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.643498] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 685.665283] env[61998]: DEBUG nova.virt.hardware [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 685.665283] env[61998]: DEBUG nova.virt.hardware [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 685.665283] env[61998]: DEBUG nova.virt.hardware [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 685.665660] env[61998]: DEBUG nova.virt.hardware [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 685.665660] env[61998]: DEBUG nova.virt.hardware [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 685.665660] env[61998]: DEBUG nova.virt.hardware [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 685.665660] env[61998]: DEBUG nova.virt.hardware [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 685.665660] env[61998]: DEBUG nova.virt.hardware [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 685.665805] env[61998]: DEBUG nova.virt.hardware [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 685.665805] env[61998]: DEBUG nova.virt.hardware [None 
req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 685.665805] env[61998]: DEBUG nova.virt.hardware [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 685.670900] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e644a5f-4b5e-4c19-8ac8-d16588ccffc2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 685.687711] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f688e66-ca3a-4d0c-8ad1-bffc1c55b620 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 685.695559] env[61998]: ERROR nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 551ad2d7-873a-4457-8669-12821e3e9793, please check neutron logs for more information.
[ 685.695559] env[61998]: ERROR nova.compute.manager Traceback (most recent call last):
[ 685.695559] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 685.695559] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 685.695559] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 685.695559] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 685.695559] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 685.695559] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 685.695559] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 685.695559] env[61998]: ERROR nova.compute.manager self.force_reraise()
[ 685.695559] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 685.695559] env[61998]: ERROR nova.compute.manager raise self.value
[ 685.695559] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 685.695559] env[61998]: ERROR nova.compute.manager updated_port = self._update_port(
[ 685.695559] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 685.695559] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 685.696054] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 685.696054] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 685.696054] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 551ad2d7-873a-4457-8669-12821e3e9793, please check neutron logs for more information.
[ 685.696054] env[61998]: ERROR nova.compute.manager
[ 685.696054] env[61998]: Traceback (most recent call last):
[ 685.696054] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 685.696054] env[61998]: listener.cb(fileno)
[ 685.696054] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 685.696054] env[61998]: result = function(*args, **kwargs)
[ 685.696054] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 685.696054] env[61998]: return func(*args, **kwargs)
[ 685.696054] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 685.696054] env[61998]: raise e
[ 685.696054] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 685.696054] env[61998]: nwinfo = self.network_api.allocate_for_instance(
[ 685.696054] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 685.696054] env[61998]: created_port_ids = self._update_ports_for_instance(
[ 685.696054] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 685.696054] env[61998]: with excutils.save_and_reraise_exception():
[ 685.696054] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 685.696054] env[61998]: self.force_reraise()
[ 685.696054] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 685.696054] env[61998]: raise self.value
[ 685.696054] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 685.696054] env[61998]: updated_port = self._update_port(
[ 685.696054] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 685.696054] env[61998]: _ensure_no_port_binding_failure(port)
[ 685.696054] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 685.696054] env[61998]: raise exception.PortBindingFailed(port_id=port['id'])
[ 685.696839] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 551ad2d7-873a-4457-8669-12821e3e9793, please check neutron logs for more information.
[ 685.696839] env[61998]: Removing descriptor: 15
[ 685.708900] env[61998]: ERROR nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 551ad2d7-873a-4457-8669-12821e3e9793, please check neutron logs for more information.
[ 685.708900] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Traceback (most recent call last):
[ 685.708900] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources
[ 685.708900] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] yield resources
[ 685.708900] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 685.708900] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] self.driver.spawn(context, instance, image_meta,
[ 685.708900] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 685.708900] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 685.708900] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 685.708900] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] vm_ref = self.build_virtual_machine(instance,
[ 685.708900] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] vif_infos = vmwarevif.get_vif_info(self._session,
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] for vif in network_info:
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] return self._sync_wrapper(fn, *args, **kwargs)
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] self.wait()
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] self[:] = self._gt.wait()
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] return self._exit_event.wait()
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 685.709428] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] current.throw(*self._exc)
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] result = function(*args, **kwargs)
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] return func(*args, **kwargs)
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] raise e
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] nwinfo = self.network_api.allocate_for_instance(
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] created_port_ids = self._update_ports_for_instance(
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] with excutils.save_and_reraise_exception():
[ 685.709944] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 685.710283] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] self.force_reraise()
[ 685.710283] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 685.710283] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] raise self.value
[ 685.710283] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 685.710283] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] updated_port = self._update_port(
[ 685.710283] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 685.710283] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] _ensure_no_port_binding_failure(port)
[ 685.710283] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 685.710283] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] raise exception.PortBindingFailed(port_id=port['id'])
[ 685.710283] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] nova.exception.PortBindingFailed: Binding failed for port 551ad2d7-873a-4457-8669-12821e3e9793, please check neutron logs for more information.
[ 685.710283] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce]
[ 685.710283] env[61998]: INFO nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Terminating instance
[ 685.711449] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "refresh_cache-297c345a-a825-47b1-a9e4-a353758d32ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 685.791987] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 686.053121] env[61998]: DEBUG nova.network.neutron [req-902240d9-b7a0-4ddb-9430-4c5e830acb23 req-c9b50825-d185-402a-a39b-deeb1eb71878 service nova] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 686.173135] env[61998]: DEBUG nova.network.neutron [req-902240d9-b7a0-4ddb-9430-4c5e830acb23 req-c9b50825-d185-402a-a39b-deeb1eb71878 service nova] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 686.177039] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Instance cache missing network info.
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.281791] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.559304] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6c7155-c3f2-457d-92cf-8b15b910fc1a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.567636] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855bfa4a-4812-4ffb-beef-3da77e5a17e3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.601711] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feccc87c-1321-4cda-b05c-c093e6f6f87f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.609734] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6bda38-baf2-407e-bee5-5fadc2afc477 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.626675] env[61998]: DEBUG nova.compute.provider_tree [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.682974] env[61998]: DEBUG oslo_concurrency.lockutils [req-902240d9-b7a0-4ddb-9430-4c5e830acb23 req-c9b50825-d185-402a-a39b-deeb1eb71878 service nova] Releasing lock "refresh_cache-297c345a-a825-47b1-a9e4-a353758d32ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.682974] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "refresh_cache-297c345a-a825-47b1-a9e4-a353758d32ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.682974] env[61998]: DEBUG nova.network.neutron [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 686.785529] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Releasing lock "refresh_cache-e730a03f-64c0-4e94-bc66-d3006be8b3ca" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.785880] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 
tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 686.785969] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 686.786153] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 686.819631] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 687.134243] env[61998]: DEBUG nova.scheduler.client.report [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 687.208945] env[61998]: DEBUG nova.network.neutron [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 687.231331] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Acquiring lock "429bcfa3-8bca-42c3-9049-b7ae09438f47" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.231747] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Lock "429bcfa3-8bca-42c3-9049-b7ae09438f47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.324223] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.512584] env[61998]: DEBUG nova.network.neutron [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.541974] env[61998]: DEBUG nova.compute.manager [req-b503a828-700d-473b-bf4f-8a3ceebed703 req-262f9996-1c80-4201-8952-1fccdc0c3154 service nova] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Received event network-vif-deleted-551ad2d7-873a-4457-8669-12821e3e9793 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 687.637224] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.006s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.637844] env[61998]: ERROR nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d0951916-cda6-44b6-b427-5e6db7f3910d, please check neutron logs for more information.
[ 687.637844] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Traceback (most recent call last):
[ 687.637844] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 687.637844] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] self.driver.spawn(context, instance, image_meta,
[ 687.637844] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 687.637844] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 687.637844] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 687.637844] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] vm_ref = self.build_virtual_machine(instance,
[ 687.637844] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 687.637844] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] vif_infos = vmwarevif.get_vif_info(self._session,
[ 687.637844] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] for vif in network_info:
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] return self._sync_wrapper(fn, *args, **kwargs)
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] self.wait()
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] self[:] = self._gt.wait()
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] return self._exit_event.wait()
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] current.throw(*self._exc)
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 687.638381] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] result = function(*args, **kwargs)
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] return func(*args, **kwargs)
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] raise e
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] nwinfo = self.network_api.allocate_for_instance(
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] created_port_ids = self._update_ports_for_instance(
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] with excutils.save_and_reraise_exception():
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] self.force_reraise()
[ 687.638753] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 687.639143] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] raise self.value
[ 687.639143] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 687.639143] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] updated_port = self._update_port(
[ 687.639143] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 687.639143] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] _ensure_no_port_binding_failure(port)
[ 687.639143] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 687.639143] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] raise exception.PortBindingFailed(port_id=port['id'])
[ 687.639143] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] nova.exception.PortBindingFailed: Binding failed for
port d0951916-cda6-44b6-b427-5e6db7f3910d, please check neutron logs for more information. [ 687.639143] env[61998]: ERROR nova.compute.manager [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] [ 687.639143] env[61998]: DEBUG nova.compute.utils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Binding failed for port d0951916-cda6-44b6-b427-5e6db7f3910d, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 687.640595] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Build of instance dff2b45c-bf45-4b22-b78f-287019b483f2 was re-scheduled: Binding failed for port d0951916-cda6-44b6-b427-5e6db7f3910d, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 687.640705] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 687.640840] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "refresh_cache-dff2b45c-bf45-4b22-b78f-287019b483f2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.640985] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquired lock "refresh_cache-dff2b45c-bf45-4b22-b78f-287019b483f2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.641156] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 687.642135] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.542s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.645925] env[61998]: INFO nova.compute.claims [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 687.826588] env[61998]: INFO nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 
tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: e730a03f-64c0-4e94-bc66-d3006be8b3ca] Took 1.04 seconds to deallocate network for instance. [ 688.017409] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "refresh_cache-297c345a-a825-47b1-a9e4-a353758d32ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.017618] env[61998]: DEBUG nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 688.017810] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 688.018119] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94652dc1-f060-47de-be02-f28f29bb2eb3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.029489] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a12f67-373b-4c1c-9edc-1b35bf1bda50 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.052469] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 297c345a-a825-47b1-a9e4-a353758d32ce could not be found. [ 688.052586] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 688.052766] env[61998]: INFO nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Took 0.03 seconds to destroy the instance on the hypervisor. [ 688.053011] env[61998]: DEBUG oslo.service.loopingcall [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 688.053237] env[61998]: DEBUG nova.compute.manager [-] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 688.053329] env[61998]: DEBUG nova.network.neutron [-] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 688.072012] env[61998]: DEBUG nova.network.neutron [-] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.169625] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.318188] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.575044] env[61998]: DEBUG nova.network.neutron [-] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.821406] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Releasing lock "refresh_cache-dff2b45c-bf45-4b22-b78f-287019b483f2" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.821466] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 688.821593] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 688.821762] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 688.842816] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.876589] env[61998]: INFO nova.scheduler.client.report [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Deleted allocations for instance e730a03f-64c0-4e94-bc66-d3006be8b3ca [ 689.078067] env[61998]: INFO nova.compute.manager [-] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Took 1.02 seconds to deallocate network for instance. [ 689.082658] env[61998]: DEBUG nova.compute.claims [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 689.082881] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.088302] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c771668-8fb8-48d3-83ce-744eeb10af20 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.096183] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e2e2c0-8515-47f1-b283-f9b533631c17 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.131160] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6cf50c0-7324-439b-b222-c6e24a43e2fe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.140610] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32632b3-6606-467b-9314-19ce68d44036 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.159425] env[61998]: DEBUG nova.compute.provider_tree 
[None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.349825] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.386457] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "e730a03f-64c0-4e94-bc66-d3006be8b3ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 124.692s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.664907] env[61998]: DEBUG nova.scheduler.client.report [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 689.853372] env[61998]: INFO nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: dff2b45c-bf45-4b22-b78f-287019b483f2] Took 1.03 seconds to deallocate network for instance. [ 689.889262] env[61998]: DEBUG nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 690.173021] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.173021] env[61998]: DEBUG nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Start building networks asynchronously for instance.
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 690.177020] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.274s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.407975] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.675822] env[61998]: DEBUG nova.compute.utils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 690.677179] env[61998]: DEBUG nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 690.677349] env[61998]: DEBUG nova.network.neutron [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 690.736218] env[61998]: DEBUG nova.policy [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef611d261d7c426d933cba1140f93c05', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19811cd7f52142278b2430bc3a08f3e6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 690.795418] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "2963f997-eb4c-4bfd-be28-6c1b383598c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.795418] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "2963f997-eb4c-4bfd-be28-6c1b383598c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.894178] env[61998]: INFO nova.scheduler.client.report [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Deleted allocations for instance dff2b45c-bf45-4b22-b78f-287019b483f2 [ 691.094029] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f41683f-5296-454e-ad8c-bc459d83381d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.101542] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836ef3b7-2deb-42e5-ac55-9ff2aaccd214 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.131190] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55170d0e-c1d8-453c-8cf4-cb8c918ab8de {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.139178] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25cae46-66af-4ee2-b21b-0e8f9f5bf506 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.154421] env[61998]: DEBUG nova.compute.provider_tree [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.179817] env[61998]: DEBUG nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 691.259513] env[61998]: DEBUG nova.network.neutron [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Successfully created port: e91bcb9a-9c45-437f-9f97-6497434ed3f3 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 691.407819] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "dff2b45c-bf45-4b22-b78f-287019b483f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 126.684s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.658162] env[61998]: DEBUG nova.scheduler.client.report [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 691.911845] env[61998]: DEBUG nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 692.030183] env[61998]: DEBUG nova.compute.manager [req-83b5e73f-54ab-42d8-8604-c00e3f22b62e req-d39a87ec-1334-4244-95a2-af47f6c8c85e service nova] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Received event network-changed-e91bcb9a-9c45-437f-9f97-6497434ed3f3 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 692.030381] env[61998]: DEBUG nova.compute.manager [req-83b5e73f-54ab-42d8-8604-c00e3f22b62e req-d39a87ec-1334-4244-95a2-af47f6c8c85e service nova] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Refreshing instance network info cache due to event network-changed-e91bcb9a-9c45-437f-9f97-6497434ed3f3.
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 692.030615] env[61998]: DEBUG oslo_concurrency.lockutils [req-83b5e73f-54ab-42d8-8604-c00e3f22b62e req-d39a87ec-1334-4244-95a2-af47f6c8c85e service nova] Acquiring lock "refresh_cache-35b6490b-eec9-4dc1-9de3-63c368bdc5d7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.030725] env[61998]: DEBUG oslo_concurrency.lockutils [req-83b5e73f-54ab-42d8-8604-c00e3f22b62e req-d39a87ec-1334-4244-95a2-af47f6c8c85e service nova] Acquired lock "refresh_cache-35b6490b-eec9-4dc1-9de3-63c368bdc5d7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.030890] env[61998]: DEBUG nova.network.neutron [req-83b5e73f-54ab-42d8-8604-c00e3f22b62e req-d39a87ec-1334-4244-95a2-af47f6c8c85e service nova] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Refreshing network info cache for port e91bcb9a-9c45-437f-9f97-6497434ed3f3 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 692.164884] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.991s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.165838] env[61998]: ERROR nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 57f06003-81d6-47d4-b5bc-48618340fa59, please check neutron logs for more information. 
[ 692.165838] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Traceback (most recent call last): [ 692.165838] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 692.165838] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] self.driver.spawn(context, instance, image_meta, [ 692.165838] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 692.165838] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 692.165838] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 692.165838] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] vm_ref = self.build_virtual_machine(instance, [ 692.165838] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 692.165838] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] vif_infos = vmwarevif.get_vif_info(self._session, [ 692.165838] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] for vif in network_info: [ 692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] return self._sync_wrapper(fn, *args, **kwargs) [ 692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] self.wait() [ 692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] self[:] = self._gt.wait() [ 692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] return self._exit_event.wait() [ 692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] current.throw(*self._exc) [ 692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
692.166177] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] result = function(*args, **kwargs) [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] return func(*args, **kwargs) [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] raise e [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] nwinfo = self.network_api.allocate_for_instance( [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] created_port_ids = self._update_ports_for_instance( [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] with excutils.save_and_reraise_exception(): [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] self.force_reraise() [ 692.166504] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.166829] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] raise self.value [ 692.166829] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 692.166829] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] updated_port = self._update_port( [ 692.166829] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.166829] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] _ensure_no_port_binding_failure(port) [ 692.166829] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 692.166829] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] raise exception.PortBindingFailed(port_id=port['id']) [ 692.166829] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] nova.exception.PortBindingFailed: Binding failed for 
port 57f06003-81d6-47d4-b5bc-48618340fa59, please check neutron logs for more information. [ 692.166829] env[61998]: ERROR nova.compute.manager [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] [ 692.167070] env[61998]: DEBUG nova.compute.utils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Binding failed for port 57f06003-81d6-47d4-b5bc-48618340fa59, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 692.168338] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.527s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.171540] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Build of instance 6643cc70-7e92-41e9-b2dc-c531a331086f was re-scheduled: Binding failed for port 57f06003-81d6-47d4-b5bc-48618340fa59, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 692.171930] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 692.172247] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquiring lock "refresh_cache-6643cc70-7e92-41e9-b2dc-c531a331086f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.172483] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Acquired lock "refresh_cache-6643cc70-7e92-41e9-b2dc-c531a331086f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.173372] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.187870] env[61998]: DEBUG nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 692.214383] env[61998]: DEBUG nova.virt.hardware [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 692.214638] env[61998]: DEBUG nova.virt.hardware [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 692.214789] env[61998]: DEBUG nova.virt.hardware [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 692.214965] env[61998]: DEBUG nova.virt.hardware [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 692.215128] env[61998]: DEBUG nova.virt.hardware [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 692.215276] env[61998]: DEBUG nova.virt.hardware [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 692.215481] env[61998]: DEBUG nova.virt.hardware [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 692.215637] env[61998]: DEBUG nova.virt.hardware [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 692.215802] env[61998]: DEBUG nova.virt.hardware [None
req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 692.215958] env[61998]: DEBUG nova.virt.hardware [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 692.216159] env[61998]: DEBUG nova.virt.hardware [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 692.217273] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d218dde-579d-463c-ba1d-19364b74a8da {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.225285] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad0fd3d-247e-43a6-b762-31d601e1ca4e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.230237] env[61998]: ERROR nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e91bcb9a-9c45-437f-9f97-6497434ed3f3, please check neutron logs for more information. 
[ 692.230237] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 692.230237] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 692.230237] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 692.230237] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 692.230237] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 692.230237] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 692.230237] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 692.230237] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.230237] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 692.230237] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.230237] env[61998]: ERROR nova.compute.manager raise self.value [ 692.230237] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 692.230237] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 692.230237] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.230237] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 692.230691] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 692.230691] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 692.230691] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e91bcb9a-9c45-437f-9f97-6497434ed3f3, please check neutron logs for more information. 
[ 692.230691] env[61998]: ERROR nova.compute.manager [ 692.230691] env[61998]: Traceback (most recent call last): [ 692.230691] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 692.230691] env[61998]: listener.cb(fileno) [ 692.230691] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 692.230691] env[61998]: result = function(*args, **kwargs) [ 692.230691] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 692.230691] env[61998]: return func(*args, **kwargs) [ 692.230691] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 692.230691] env[61998]: raise e [ 692.230691] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 692.230691] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 692.230691] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 692.230691] env[61998]: created_port_ids = self._update_ports_for_instance( [ 692.230691] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 692.230691] env[61998]: with excutils.save_and_reraise_exception(): [ 692.230691] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.230691] env[61998]: self.force_reraise() [ 692.230691] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.230691] env[61998]: raise self.value [ 692.230691] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 692.230691] env[61998]: updated_port = self._update_port( [ 692.230691] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.230691] env[61998]: _ensure_no_port_binding_failure(port) [ 692.230691] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 692.230691] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 692.231973] env[61998]: nova.exception.PortBindingFailed: Binding failed for port e91bcb9a-9c45-437f-9f97-6497434ed3f3, please check neutron logs for more information. [ 692.231973] env[61998]: Removing descriptor: 17 [ 692.240471] env[61998]: ERROR nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e91bcb9a-9c45-437f-9f97-6497434ed3f3, please check neutron logs for more information. 
[ 692.240471] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Traceback (most recent call last): [ 692.240471] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 692.240471] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] yield resources [ 692.240471] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 692.240471] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] self.driver.spawn(context, instance, image_meta, [ 692.240471] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 692.240471] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 692.240471] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 692.240471] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] vm_ref = self.build_virtual_machine(instance, [ 692.240471] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 692.241025] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] vif_infos = vmwarevif.get_vif_info(self._session, [ 692.241025] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 692.241025] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] for vif in network_info: [ 692.241025] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 692.241025] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] return self._sync_wrapper(fn, *args, **kwargs) [ 692.241025] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 692.241025] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] self.wait() [ 692.241025] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 692.241025] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] self[:] = self._gt.wait() [ 692.241025] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 692.241025] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] return self._exit_event.wait() [ 692.241025] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 692.241025] env[61998]: ERROR 
nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] current.throw(*self._exc) [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] result = function(*args, **kwargs) [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] return func(*args, **kwargs) [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] raise e [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] nwinfo = self.network_api.allocate_for_instance( [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] created_port_ids = self._update_ports_for_instance( [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] with excutils.save_and_reraise_exception(): [ 692.241399] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.241791] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] self.force_reraise() [ 692.241791] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.241791] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] raise self.value [ 692.241791] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 692.241791] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] updated_port = self._update_port( [ 692.241791] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.241791] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] _ensure_no_port_binding_failure(port) [ 692.241791] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
692.241791] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] raise exception.PortBindingFailed(port_id=port['id']) [ 692.241791] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] nova.exception.PortBindingFailed: Binding failed for port e91bcb9a-9c45-437f-9f97-6497434ed3f3, please check neutron logs for more information. [ 692.241791] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] [ 692.241791] env[61998]: INFO nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Terminating instance [ 692.242642] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Acquiring lock "refresh_cache-35b6490b-eec9-4dc1-9de3-63c368bdc5d7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.432390] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.551021] env[61998]: DEBUG nova.network.neutron [req-83b5e73f-54ab-42d8-8604-c00e3f22b62e req-d39a87ec-1334-4244-95a2-af47f6c8c85e service nova] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.609807] env[61998]: DEBUG nova.network.neutron [req-83b5e73f-54ab-42d8-8604-c00e3f22b62e req-d39a87ec-1334-4244-95a2-af47f6c8c85e service nova] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.693293] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.775950] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.973697] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e071361-92a7-42c7-afae-dfde7e90b49e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.982222] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95370701-b2a9-4f45-a487-c37a457e4303 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.010671] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f777aa-366f-473b-8e56-e64811db4057 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.018026] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755c18a6-fe08-495b-8f56-29425fa6f722 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.030487] env[61998]: DEBUG nova.compute.provider_tree [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.113058] env[61998]: DEBUG oslo_concurrency.lockutils [req-83b5e73f-54ab-42d8-8604-c00e3f22b62e req-d39a87ec-1334-4244-95a2-af47f6c8c85e service nova] Releasing lock "refresh_cache-35b6490b-eec9-4dc1-9de3-63c368bdc5d7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.113282] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Acquired lock "refresh_cache-35b6490b-eec9-4dc1-9de3-63c368bdc5d7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.113463] env[61998]: DEBUG nova.network.neutron [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 693.279254] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Releasing lock "refresh_cache-6643cc70-7e92-41e9-b2dc-c531a331086f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.279497] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 
tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 693.279700] env[61998]: DEBUG nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 693.279882] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.295209] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.533716] env[61998]: DEBUG nova.scheduler.client.report [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 693.638256] env[61998]: DEBUG nova.network.neutron [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.688536] env[61998]: DEBUG nova.network.neutron [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.798193] env[61998]: DEBUG nova.network.neutron [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.040015] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.871s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.040669] env[61998]: ERROR nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d14238cd-e084-4f82-bb48-1d9df1b5aae4, please check neutron logs for more information. [ 694.040669] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Traceback (most recent call last): [ 694.040669] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 694.040669] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] self.driver.spawn(context, instance, image_meta, [ 694.040669] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 694.040669] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 694.040669] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 694.040669] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] vm_ref = self.build_virtual_machine(instance, [ 694.040669] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 694.040669] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] vif_infos = vmwarevif.get_vif_info(self._session, [ 694.040669] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 694.040998] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] for vif in network_info: [ 694.040998] env[61998]: ERROR 
nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 694.040998] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] return self._sync_wrapper(fn, *args, **kwargs) [ 694.040998] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 694.040998] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] self.wait() [ 694.040998] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 694.040998] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] self[:] = self._gt.wait() [ 694.040998] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 694.040998] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] return self._exit_event.wait() [ 694.040998] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 694.040998] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] current.throw(*self._exc) [ 694.040998] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 694.040998] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] result = function(*args, **kwargs) [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] return func(*args, **kwargs) [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] raise e [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] nwinfo = self.network_api.allocate_for_instance( [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] created_port_ids = self._update_ports_for_instance( [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] with excutils.save_and_reraise_exception(): [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: 
c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] self.force_reraise() [ 694.041340] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 694.041678] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] raise self.value [ 694.041678] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 694.041678] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] updated_port = self._update_port( [ 694.041678] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 694.041678] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] _ensure_no_port_binding_failure(port) [ 694.041678] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 694.041678] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] raise exception.PortBindingFailed(port_id=port['id']) [ 694.041678] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] nova.exception.PortBindingFailed: Binding failed for port d14238cd-e084-4f82-bb48-1d9df1b5aae4, please check neutron logs for more information. [ 694.041678] env[61998]: ERROR nova.compute.manager [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] [ 694.041678] env[61998]: DEBUG nova.compute.utils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Binding failed for port d14238cd-e084-4f82-bb48-1d9df1b5aae4, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 694.042588] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.893s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.045338] env[61998]: DEBUG nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Build of instance c6837b87-b01c-454c-b986-6f9fa57656bf was re-scheduled: Binding failed for port d14238cd-e084-4f82-bb48-1d9df1b5aae4, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 694.045755] env[61998]: DEBUG nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 694.045974] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquiring lock "refresh_cache-c6837b87-b01c-454c-b986-6f9fa57656bf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.046128] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquired lock "refresh_cache-c6837b87-b01c-454c-b986-6f9fa57656bf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.046283] env[61998]: DEBUG nova.network.neutron [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 694.054886] env[61998]: DEBUG nova.compute.manager [req-dcea887f-0033-4382-9aab-889c1492c083 req-5d6b13e7-7cd8-4c34-b78d-982c9f46e98d service nova] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Received event network-vif-deleted-e91bcb9a-9c45-437f-9f97-6497434ed3f3 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 694.191458] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Releasing lock "refresh_cache-35b6490b-eec9-4dc1-9de3-63c368bdc5d7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.191918] env[61998]: DEBUG nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 694.192152] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 694.192439] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d5aa5ba-67c9-45c7-a79b-fd79dac418f2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.201348] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d66f35-c51c-40e4-8d6b-6f83fc253fc2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.223510] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 35b6490b-eec9-4dc1-9de3-63c368bdc5d7 could not be found. [ 694.223711] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 694.223896] env[61998]: INFO nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Took 0.03 seconds to destroy the instance on the hypervisor. [ 694.224126] env[61998]: DEBUG oslo.service.loopingcall [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 694.224328] env[61998]: DEBUG nova.compute.manager [-] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 694.224421] env[61998]: DEBUG nova.network.neutron [-] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 694.238124] env[61998]: DEBUG nova.network.neutron [-] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.300709] env[61998]: INFO nova.compute.manager [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] [instance: 6643cc70-7e92-41e9-b2dc-c531a331086f] Took 1.02 seconds to deallocate network for instance.
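Note on the "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return." record above: it is emitted by oslo.service's loopingcall machinery, which Nova uses to retry network deallocation when it fails transiently. A minimal, self-contained sketch of that retry pattern follows; the function body, retry counts, and exception type are invented for illustration and are not Nova's actual values:

    # Sketch only: a RetryDecorator-wrapped helper in the style of
    # _deallocate_network_with_retries. All values here are hypothetical.
    from oslo_service import loopingcall

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=10,
                                exceptions=(ConnectionError,))
    def _deallocate_with_retries():
        # A real implementation would call the network API here; raising
        # one of the listed exception types makes the decorator sleep and
        # call the function again, which is when loopingcall logs a
        # "Waiting for function ... to return." line like the one above.
        return True

    _deallocate_with_retries()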
[ 694.574209] env[61998]: DEBUG nova.network.neutron [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.659015] env[61998]: DEBUG nova.network.neutron [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.740810] env[61998]: DEBUG nova.network.neutron [-] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.855821] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a200e3-c9fc-463c-9a2c-4d401406c273 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.861888] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10098ee5-005b-468e-93ce-368500dc9352 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.893232] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b831cc-3bc4-4352-bb1d-2c1cd79f9842 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.900039] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc87e85-7cc5-4c0b-bcb4-f45915aab887 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.912130] env[61998]: DEBUG nova.compute.provider_tree [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.163713] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Releasing lock "refresh_cache-c6837b87-b01c-454c-b986-6f9fa57656bf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.163713] env[61998]: DEBUG nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 695.163713] env[61998]: DEBUG nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 695.163713] env[61998]: DEBUG nova.network.neutron [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 695.178171] env[61998]: DEBUG nova.network.neutron [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.244036] env[61998]: INFO nova.compute.manager [-] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Took 1.02 seconds to deallocate network for instance. [ 695.248783] env[61998]: DEBUG nova.compute.claims [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 695.248959] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.332168] env[61998]: INFO nova.scheduler.client.report [None req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Deleted allocations for instance 6643cc70-7e92-41e9-b2dc-c531a331086f [ 695.415998] env[61998]: DEBUG nova.scheduler.client.report [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 695.681424] env[61998]: DEBUG nova.network.neutron [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.842700] env[61998]: DEBUG oslo_concurrency.lockutils [None 
req-8091cf39-2135-4941-84ab-75f207b21f28 tempest-ListServersNegativeTestJSON-1628676500 tempest-ListServersNegativeTestJSON-1628676500-project-member] Lock "6643cc70-7e92-41e9-b2dc-c531a331086f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 131.070s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.921034] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.878s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.921657] env[61998]: ERROR nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fbbca0d9-b5fc-44c7-a41f-523cf7db5a95, please check neutron logs for more information. [ 695.921657] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Traceback (most recent call last): [ 695.921657] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 695.921657] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] self.driver.spawn(context, instance, image_meta, [ 695.921657] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 695.921657] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 695.921657] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 695.921657] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] vm_ref = self.build_virtual_machine(instance, [ 695.921657] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 695.921657] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] vif_infos = vmwarevif.get_vif_info(self._session, [ 695.921657] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] for vif in network_info: [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] return self._sync_wrapper(fn, *args, **kwargs) [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/model.py", line 605, in
_sync_wrapper [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] self.wait() [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] self[:] = self._gt.wait() [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] return self._exit_event.wait() [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] current.throw(*self._exc) [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 695.921984] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] result = function(*args, **kwargs) [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] return func(*args, **kwargs) [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] raise e [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] nwinfo = self.network_api.allocate_for_instance( [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] created_port_ids = self._update_ports_for_instance( [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] with excutils.save_and_reraise_exception(): [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] self.force_reraise() [ 695.922494] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, 
in force_reraise [ 695.922884] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] raise self.value [ 695.922884] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 695.922884] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] updated_port = self._update_port( [ 695.922884] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.922884] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] _ensure_no_port_binding_failure(port) [ 695.922884] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.922884] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] raise exception.PortBindingFailed(port_id=port['id']) [ 695.922884] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] nova.exception.PortBindingFailed: Binding failed for port fbbca0d9-b5fc-44c7-a41f-523cf7db5a95, please check neutron logs for more information. [ 695.922884] env[61998]: ERROR nova.compute.manager [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] [ 695.922884] env[61998]: DEBUG nova.compute.utils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Binding failed for port fbbca0d9-b5fc-44c7-a41f-523cf7db5a95, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 695.923451] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.982s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.924934] env[61998]: INFO nova.compute.claims [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 695.927801] env[61998]: DEBUG nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Build of instance 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1 was re-scheduled: Binding failed for port fbbca0d9-b5fc-44c7-a41f-523cf7db5a95, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 695.928203] env[61998]: DEBUG nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 695.928418] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquiring lock "refresh_cache-41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.928561] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Acquired lock "refresh_cache-41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.928715] env[61998]: DEBUG nova.network.neutron [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 696.183919] env[61998]: INFO nova.compute.manager [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: c6837b87-b01c-454c-b986-6f9fa57656bf] Took 1.02 seconds to deallocate network for instance. [ 696.345810] env[61998]: DEBUG nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 696.454929] env[61998]: DEBUG nova.network.neutron [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.544931] env[61998]: DEBUG nova.network.neutron [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.870985] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.047379] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Releasing lock "refresh_cache-41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.047609] env[61998]: DEBUG nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 697.047817] env[61998]: DEBUG nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 697.047984] env[61998]: DEBUG nova.network.neutron [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 697.209589] env[61998]: INFO nova.scheduler.client.report [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Deleted allocations for instance c6837b87-b01c-454c-b986-6f9fa57656bf [ 697.228079] env[61998]: DEBUG nova.network.neutron [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.286821] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31db318d-65ed-4631-a2bf-8ac48933ad30 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.294904] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8afb87e-4a10-41d7-a76c-17faf6d90e5f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.323874] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea1da75-6666-4493-9aff-4d0a28325a94 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.331018] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137f7b09-02f6-4aac-881e-a971f8444baa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.343565] env[61998]: DEBUG nova.compute.provider_tree [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.717594] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b24844b-8bfd-4726-9459-0b7ba8621469 tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Lock "c6837b87-b01c-454c-b986-6f9fa57656bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 129.411s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.732412] env[61998]: DEBUG nova.network.neutron [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.846750] env[61998]: DEBUG nova.scheduler.client.report [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 698.221813] env[61998]: DEBUG nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Starting instance...
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 698.237976] env[61998]: INFO nova.compute.manager [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] [instance: 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1] Took 1.19 seconds to deallocate network for instance. [ 698.355734] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.357132] env[61998]: DEBUG nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 698.359659] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.512s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.361186] env[61998]: INFO nova.compute.claims [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 698.745719] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.866094] env[61998]: DEBUG nova.compute.utils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 698.869668] env[61998]: DEBUG nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 698.869757] env[61998]: DEBUG nova.network.neutron [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 698.933896] env[61998]: DEBUG nova.policy [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '274f384481b44caf9a114ca29022c231', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '067efe2b964d498fa7e2843439f023ca', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 699.239057] env[61998]: DEBUG nova.network.neutron [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Successfully created port: 7aacda59-a6a6-4e46-a147-217efa725665 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 699.268203] env[61998]: INFO nova.scheduler.client.report [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Deleted allocations for instance 41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1 [ 699.371025] env[61998]: DEBUG nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 699.693777] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ed0f8c-fcc4-4358-9d2b-6ebe58150c7a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.701886] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d91a11-ebe2-474e-91bf-649e22abaeb1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.734063] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65190759-8f45-4a4f-bf59-8c936b80e690 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.741618] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bc2b09-dd59-4f71-ac88-535d150534b4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.755059] env[61998]: DEBUG nova.compute.provider_tree [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.778917] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ec0f465-99ab-4e9c-9ad3-f368e61238ee tempest-ServersAdminTestJSON-252421511 tempest-ServersAdminTestJSON-252421511-project-member] Lock "41bc5a14-1d13-4730-80c5-a0bcfb1f0ad1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 130.472s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.150329] env[61998]: DEBUG nova.compute.manager [req-2114ad22-ed86-4eba-836f-cf8e69cb6a1a req-d89d5d42-22d1-4d39-a9f2-68c8c3e7c38e service nova] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Received event network-changed-7aacda59-a6a6-4e46-a147-217efa725665 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 700.150525] env[61998]: DEBUG nova.compute.manager [req-2114ad22-ed86-4eba-836f-cf8e69cb6a1a req-d89d5d42-22d1-4d39-a9f2-68c8c3e7c38e service nova] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Refreshing instance network info cache due to event network-changed-7aacda59-a6a6-4e46-a147-217efa725665.
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 700.150733] env[61998]: DEBUG oslo_concurrency.lockutils [req-2114ad22-ed86-4eba-836f-cf8e69cb6a1a req-d89d5d42-22d1-4d39-a9f2-68c8c3e7c38e service nova] Acquiring lock "refresh_cache-9da95edb-f9fb-40f3-9317-d27f1bae0ecf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.150962] env[61998]: DEBUG oslo_concurrency.lockutils [req-2114ad22-ed86-4eba-836f-cf8e69cb6a1a req-d89d5d42-22d1-4d39-a9f2-68c8c3e7c38e service nova] Acquired lock "refresh_cache-9da95edb-f9fb-40f3-9317-d27f1bae0ecf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.151314] env[61998]: DEBUG nova.network.neutron [req-2114ad22-ed86-4eba-836f-cf8e69cb6a1a req-d89d5d42-22d1-4d39-a9f2-68c8c3e7c38e service nova] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Refreshing network info cache for port 7aacda59-a6a6-4e46-a147-217efa725665 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 700.257973] env[61998]: DEBUG nova.scheduler.client.report [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 700.282537] env[61998]: DEBUG nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 700.309045] env[61998]: ERROR nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7aacda59-a6a6-4e46-a147-217efa725665, please check neutron logs for more information. 
[ 700.309045] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 700.309045] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 700.309045] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 700.309045] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 700.309045] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 700.309045] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 700.309045] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 700.309045] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 700.309045] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 700.309045] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 700.309045] env[61998]: ERROR nova.compute.manager raise self.value [ 700.309045] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 700.309045] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 700.309045] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 700.309045] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 700.309638] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 700.309638] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 700.309638] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7aacda59-a6a6-4e46-a147-217efa725665, please check neutron logs for more information. 
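Note on the paired tracebacks around this point: both the per-instance copy above and the bare greenthread copy that follows funnel through oslo.utils' save_and_reraise_exception context manager, which is what produces the excutils.py __exit__ and force_reraise() frames between _update_ports_for_instance and the final PortBindingFailed. A rough sketch of that pattern, using hypothetical stand-in names rather than Nova's real code:

    # Sketch only: how save_and_reraise_exception shapes a traceback.
    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        pass

    def _update_port():
        # Stand-in for the neutron port update whose binding fails.
        raise PortBindingFailed('binding failed for port ...')

    def _update_ports_for_instance():
        with excutils.save_and_reraise_exception():
            # If this block raises, the context manager's __exit__ runs
            # (cleanup could happen here) and then force_reraise()
            # re-raises the saved exception, inserting the excutils.py
            # frames seen in the log.
            _update_port()

    # Calling _update_ports_for_instance() raises PortBindingFailed with
    # __exit__/force_reraise in the middle of the traceback.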
[ 700.309638] env[61998]: ERROR nova.compute.manager [ 700.309638] env[61998]: Traceback (most recent call last): [ 700.309638] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 700.309638] env[61998]: listener.cb(fileno) [ 700.309638] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 700.309638] env[61998]: result = function(*args, **kwargs) [ 700.309638] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 700.309638] env[61998]: return func(*args, **kwargs) [ 700.309638] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 700.309638] env[61998]: raise e [ 700.309638] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 700.309638] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 700.309638] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 700.309638] env[61998]: created_port_ids = self._update_ports_for_instance( [ 700.309638] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 700.309638] env[61998]: with excutils.save_and_reraise_exception(): [ 700.309638] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 700.309638] env[61998]: self.force_reraise() [ 700.309638] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 700.309638] env[61998]: raise self.value [ 700.309638] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 700.309638] env[61998]: updated_port = self._update_port( [ 700.309638] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 700.309638] env[61998]: _ensure_no_port_binding_failure(port) [ 700.309638] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 700.309638] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 700.310494] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 7aacda59-a6a6-4e46-a147-217efa725665, please check neutron logs for more information. [ 700.310494] env[61998]: Removing descriptor: 17 [ 700.383256] env[61998]: DEBUG nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 700.417428] env[61998]: DEBUG nova.virt.hardware [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 700.417681] env[61998]: DEBUG nova.virt.hardware [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 700.417868] env[61998]: DEBUG nova.virt.hardware [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 700.418049] env[61998]: DEBUG nova.virt.hardware [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 700.418198] env[61998]: DEBUG nova.virt.hardware [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 700.418342] env[61998]: DEBUG nova.virt.hardware [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 700.418547] env[61998]: DEBUG nova.virt.hardware [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 700.418719] env[61998]: DEBUG nova.virt.hardware [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 700.418911] env[61998]: DEBUG nova.virt.hardware [None
req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 700.419292] env[61998]: DEBUG nova.virt.hardware [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 700.419502] env[61998]: DEBUG nova.virt.hardware [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 700.420911] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16f7b21-4560-46b9-a9fd-287f901b8a8c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.430251] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a82a33-b613-4ef5-ac97-5877b11cacd9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.444594] env[61998]: ERROR nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7aacda59-a6a6-4e46-a147-217efa725665, please check neutron logs for more information. 
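Every PortBindingFailed dump in this run bottoms out in _ensure_no_port_binding_failure(port) at nova/network/neutron.py:294, which converts a failed Neutron port binding into nova.exception.PortBindingFailed. A minimal self-contained sketch of that guard follows; the log confirms only the function name and the raise, so the check against the port's binding:vif_type field (and the 'binding_failed' value) is an assumption about how Neutron reports a failed bind, not something shown in these records.

# Minimal sketch of the guard at the bottom of the tracebacks.
# Assumption: a failed bind is reported via the port dict's
# 'binding:vif_type' value 'binding_failed'; the log confirms only
# the function name and the PortBindingFailed raise.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id

def _ensure_no_port_binding_failure(port):
    # Raise as soon as Neutron hands back a port whose binding failed.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# The port from this log, as Neutron might have returned it:
port = {'id': '7aacda59-a6a6-4e46-a147-217efa725665',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # matches the message format seen in the log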
[ 700.444594] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Traceback (most recent call last): [ 700.444594] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 700.444594] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] yield resources [ 700.444594] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 700.444594] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] self.driver.spawn(context, instance, image_meta, [ 700.444594] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 700.444594] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 700.444594] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 700.444594] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] vm_ref = self.build_virtual_machine(instance, [ 700.444594] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 700.445089] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] vif_infos = vmwarevif.get_vif_info(self._session, [ 700.445089] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 700.445089] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] for vif in network_info: [ 700.445089] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 700.445089] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] return self._sync_wrapper(fn, *args, **kwargs) [ 700.445089] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 700.445089] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] self.wait() [ 700.445089] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 700.445089] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] self[:] = self._gt.wait() [ 700.445089] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 700.445089] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] return self._exit_event.wait() [ 700.445089] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 700.445089] env[61998]: ERROR 
nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] current.throw(*self._exc) [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] result = function(*args, **kwargs) [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] return func(*args, **kwargs) [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] raise e [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] nwinfo = self.network_api.allocate_for_instance( [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] created_port_ids = self._update_ports_for_instance( [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] with excutils.save_and_reraise_exception(): [ 700.445420] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 700.445812] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] self.force_reraise() [ 700.445812] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 700.445812] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] raise self.value [ 700.445812] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 700.445812] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] updated_port = self._update_port( [ 700.445812] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 700.445812] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] _ensure_no_port_binding_failure(port) [ 700.445812] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
700.445812] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] raise exception.PortBindingFailed(port_id=port['id']) [ 700.445812] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] nova.exception.PortBindingFailed: Binding failed for port 7aacda59-a6a6-4e46-a147-217efa725665, please check neutron logs for more information. [ 700.445812] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] [ 700.445812] env[61998]: INFO nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Terminating instance [ 700.449820] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquiring lock "refresh_cache-9da95edb-f9fb-40f3-9317-d27f1bae0ecf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.675260] env[61998]: DEBUG nova.network.neutron [req-2114ad22-ed86-4eba-836f-cf8e69cb6a1a req-d89d5d42-22d1-4d39-a9f2-68c8c3e7c38e service nova] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.768289] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.768741] env[61998]: DEBUG nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 700.771256] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.979s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.775020] env[61998]: INFO nova.compute.claims [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 700.816202] env[61998]: DEBUG nova.network.neutron [req-2114ad22-ed86-4eba-836f-cf8e69cb6a1a req-d89d5d42-22d1-4d39-a9f2-68c8c3e7c38e service nova] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.820679] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.276613] env[61998]: DEBUG nova.compute.utils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 701.281138] env[61998]: DEBUG nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 701.281822] env[61998]: DEBUG nova.network.neutron [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 701.317966] env[61998]: DEBUG oslo_concurrency.lockutils [req-2114ad22-ed86-4eba-836f-cf8e69cb6a1a req-d89d5d42-22d1-4d39-a9f2-68c8c3e7c38e service nova] Releasing lock "refresh_cache-9da95edb-f9fb-40f3-9317-d27f1bae0ecf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.318671] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquired lock "refresh_cache-9da95edb-f9fb-40f3-9317-d27f1bae0ecf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.318852] env[61998]: DEBUG nova.network.neutron [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 701.365238] env[61998]: DEBUG nova.policy [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2db7d86457464d5cbb1e439a5d1524cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '84aa8195b80f4f76be5dc8df017bc297', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 701.759613] env[61998]: DEBUG nova.network.neutron [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Successfully created port: 0878c0d8-e560-4f55-bbea-86c59a754ba1 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 701.781780] env[61998]: DEBUG nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 701.867911] env[61998]: DEBUG nova.network.neutron [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.025871] env[61998]: DEBUG nova.network.neutron [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.192099] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477ff69c-58a8-4a34-8cd0-76408256c899 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.203508] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08405f3c-4f15-4523-8951-05897c67047c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.237448] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d11749-b4e9-4e37-a0d8-8998c75d444d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.245412] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640923dd-c716-458b-a90d-edd2221d2cf9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.258612] env[61998]: DEBUG nova.compute.provider_tree [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.319889] env[61998]: DEBUG nova.compute.manager [req-b2f15bf2-03db-4369-9e98-c10512124543 req-86eea841-8ecc-47f3-bafd-386e472b081c service nova] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Received event network-vif-deleted-7aacda59-a6a6-4e46-a147-217efa725665 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 702.532355] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Releasing lock "refresh_cache-9da95edb-f9fb-40f3-9317-d27f1bae0ecf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.532956] env[61998]: DEBUG nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 702.533120] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 702.533504] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-372dd3d0-e9d8-485e-a06d-2e4cd7547ebe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.542563] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f002e3-73e2-4e00-8527-5d82114c2d38 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.566496] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9da95edb-f9fb-40f3-9317-d27f1bae0ecf could not be found. [ 702.566877] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 702.567174] env[61998]: INFO nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Took 0.03 seconds to destroy the instance on the hypervisor. [ 702.567440] env[61998]: DEBUG oslo.service.loopingcall [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 702.567671] env[61998]: DEBUG nova.compute.manager [-] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 702.567766] env[61998]: DEBUG nova.network.neutron [-] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 702.587747] env[61998]: DEBUG nova.network.neutron [-] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.763868] env[61998]: DEBUG nova.scheduler.client.report [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 702.794201] env[61998]: DEBUG nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 702.818985] env[61998]: DEBUG nova.virt.hardware [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 702.818985] env[61998]: DEBUG nova.virt.hardware [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 702.818985] env[61998]: DEBUG nova.virt.hardware [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 702.819310] env[61998]: DEBUG nova.virt.hardware [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 702.819310] env[61998]: DEBUG nova.virt.hardware [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 702.819310] env[61998]: 
DEBUG nova.virt.hardware [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 702.821573] env[61998]: DEBUG nova.virt.hardware [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 702.822035] env[61998]: DEBUG nova.virt.hardware [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 702.822274] env[61998]: DEBUG nova.virt.hardware [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 702.822413] env[61998]: DEBUG nova.virt.hardware [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 702.822670] env[61998]: DEBUG nova.virt.hardware [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 702.824162] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533ddc29-8629-43f5-910e-dd6c4811acd6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.834683] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab5b636-7712-4bc7-aa8e-e31781ca1576 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.029141] env[61998]: ERROR nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0878c0d8-e560-4f55-bbea-86c59a754ba1, please check neutron logs for more information. 
[ 703.029141] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 703.029141] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 703.029141] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 703.029141] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 703.029141] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 703.029141] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 703.029141] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 703.029141] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.029141] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 703.029141] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.029141] env[61998]: ERROR nova.compute.manager raise self.value [ 703.029141] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 703.029141] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 703.029141] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.029141] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 703.029616] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 703.029616] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 703.029616] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0878c0d8-e560-4f55-bbea-86c59a754ba1, please check neutron logs for more information. 
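Each of these dumps passes through oslo_utils.excutils.save_and_reraise_exception() in _update_ports_for_instance before the PortBindingFailed surfaces, which is why every traceback contains the __exit__/force_reraise/raise self.value frames. That context manager is real oslo.utils API: it lets the except block run cleanup and then re-raises the original exception on exit. A small sketch of the pattern, with a hypothetical stand-in for both the failing call and the cleanup body (the log does not show what cleanup Nova performs there):

from oslo_utils import excutils

def update_ports():
    try:
        raise ValueError("stand-in for the failing _update_port() call")
    except Exception:
        # Cleanup runs here, then the original exception is re-raised on
        # exit -- the force_reraise()/raise self.value frames in the log.
        with excutils.save_and_reraise_exception():
            print("hypothetical cleanup of already-created ports")

try:
    update_ports()
except ValueError as exc:
    print(f"original exception preserved: {exc}")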
[ 703.029616] env[61998]: ERROR nova.compute.manager [ 703.029616] env[61998]: Traceback (most recent call last): [ 703.029616] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 703.029616] env[61998]: listener.cb(fileno) [ 703.029616] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 703.029616] env[61998]: result = function(*args, **kwargs) [ 703.029616] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 703.029616] env[61998]: return func(*args, **kwargs) [ 703.029616] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 703.029616] env[61998]: raise e [ 703.029616] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 703.029616] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 703.029616] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 703.029616] env[61998]: created_port_ids = self._update_ports_for_instance( [ 703.029616] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 703.029616] env[61998]: with excutils.save_and_reraise_exception(): [ 703.029616] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.029616] env[61998]: self.force_reraise() [ 703.029616] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.029616] env[61998]: raise self.value [ 703.029616] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 703.029616] env[61998]: updated_port = self._update_port( [ 703.029616] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.029616] env[61998]: _ensure_no_port_binding_failure(port) [ 703.029616] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 703.029616] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 703.030524] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 0878c0d8-e560-4f55-bbea-86c59a754ba1, please check neutron logs for more information. [ 703.030524] env[61998]: Removing descriptor: 17 [ 703.030524] env[61998]: ERROR nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0878c0d8-e560-4f55-bbea-86c59a754ba1, please check neutron logs for more information. 
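The hub-level traceback above (entered from eventlet/hubs/poll.py) shows where the exception actually originates: _allocate_network_async runs in an eventlet greenthread, and the spawning path only sees the PortBindingFailed later, when iterating network_info blocks in _sync_wrapper()/wait(). A small sketch of that propagation pattern using plain eventlet with a stand-in exception, rather than Nova's own async network-info wrapper:

import eventlet

def allocate():
    # Stand-in for _allocate_network_async, which raises PortBindingFailed.
    raise RuntimeError("Binding failed for port <stand-in>")

gt = eventlet.spawn(allocate)   # allocation proceeds in the background
try:
    # GreenThread.wait() re-raises the greenthread's exception in the
    # caller, mirroring the wait() frames in the instance traceback.
    gt.wait()
except RuntimeError as exc:
    print(f"caught in the spawning path: {exc}")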
[ 703.030524] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Traceback (most recent call last): [ 703.030524] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 703.030524] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] yield resources [ 703.030524] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 703.030524] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] self.driver.spawn(context, instance, image_meta, [ 703.030524] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 703.030524] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] self._vmops.spawn(context, instance, image_meta, injected_files, [ 703.030524] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 703.030524] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] vm_ref = self.build_virtual_machine(instance, [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] vif_infos = vmwarevif.get_vif_info(self._session, [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] for vif in network_info: [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] return self._sync_wrapper(fn, *args, **kwargs) [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] self.wait() [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] self[:] = self._gt.wait() [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] return self._exit_event.wait() [ 703.030898] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 703.031380] env[61998]: ERROR 
nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] result = hub.switch() [ 703.031380] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 703.031380] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] return self.greenlet.switch() [ 703.031380] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 703.031380] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] result = function(*args, **kwargs) [ 703.031380] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 703.031380] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] return func(*args, **kwargs) [ 703.031380] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 703.031380] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] raise e [ 703.031380] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 703.031380] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] nwinfo = self.network_api.allocate_for_instance( [ 703.031380] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 703.031380] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] created_port_ids = self._update_ports_for_instance( [ 703.032364] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 703.032364] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] with excutils.save_and_reraise_exception(): [ 703.032364] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.032364] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] self.force_reraise() [ 703.032364] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.032364] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] raise self.value [ 703.032364] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 703.032364] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] updated_port = self._update_port( [ 703.032364] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.032364] 
env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] _ensure_no_port_binding_failure(port) [ 703.032364] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 703.032364] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] raise exception.PortBindingFailed(port_id=port['id']) [ 703.032774] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] nova.exception.PortBindingFailed: Binding failed for port 0878c0d8-e560-4f55-bbea-86c59a754ba1, please check neutron logs for more information. [ 703.032774] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] [ 703.032774] env[61998]: INFO nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Terminating instance [ 703.034451] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Acquiring lock "refresh_cache-a8101e8d-55d0-4f70-9119-f5e176ba8212" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.034451] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Acquired lock "refresh_cache-a8101e8d-55d0-4f70-9119-f5e176ba8212" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.034451] env[61998]: DEBUG nova.network.neutron [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 703.089351] env[61998]: DEBUG nova.network.neutron [-] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.271560] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.272144] env[61998]: DEBUG nova.compute.manager [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 703.274789] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.192s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.559616] env[61998]: DEBUG nova.network.neutron [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 703.591953] env[61998]: INFO nova.compute.manager [-] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Took 1.02 seconds to deallocate network for instance. [ 703.595043] env[61998]: DEBUG nova.compute.claims [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 703.595234] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.686143] env[61998]: DEBUG nova.network.neutron [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.758872] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Acquiring lock "08e60642-0784-4898-9de5-444a24fba508" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.758872] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Lock "08e60642-0784-4898-9de5-444a24fba508" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.779817] env[61998]: DEBUG nova.compute.utils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 703.783300] env[61998]: DEBUG nova.compute.manager [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 
tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Not allocating networking since 'none' was specified. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 704.145939] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57852745-63f1-49e8-b86a-c6e592b89a52 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.154963] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c0ceb3-adb8-42aa-ac85-4346a817cb03 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.187111] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c24c084-7a7f-47d0-b10e-4df15a0a89af {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.189818] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Releasing lock "refresh_cache-a8101e8d-55d0-4f70-9119-f5e176ba8212" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.191116] env[61998]: DEBUG nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 704.191356] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 704.191629] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa12ae8b-e040-47ad-a065-d096943e2371 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.199470] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557c9820-8051-4f74-860b-8e0591e9e065 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.205871] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d277ea-0ecd-473e-8ec6-61ac37e9ae29 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.225383] env[61998]: DEBUG nova.compute.provider_tree [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.231720] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a8101e8d-55d0-4f70-9119-f5e176ba8212 could not be found. [ 704.231720] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 704.231720] env[61998]: INFO nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Took 0.04 seconds to destroy the instance on the hypervisor. [ 704.231720] env[61998]: DEBUG oslo.service.loopingcall [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 704.232129] env[61998]: DEBUG nova.compute.manager [-] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 704.233417] env[61998]: DEBUG nova.network.neutron [-] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 704.247503] env[61998]: DEBUG nova.network.neutron [-] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.280823] env[61998]: DEBUG nova.compute.manager [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 704.344496] env[61998]: DEBUG nova.compute.manager [req-407d4895-72b5-4a64-8b69-6534dd2d79f8 req-1f86da42-2db4-471c-919f-08ffa38488c6 service nova] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Received event network-changed-0878c0d8-e560-4f55-bbea-86c59a754ba1 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 704.344496] env[61998]: DEBUG nova.compute.manager [req-407d4895-72b5-4a64-8b69-6534dd2d79f8 req-1f86da42-2db4-471c-919f-08ffa38488c6 service nova] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Refreshing instance network info cache due to event network-changed-0878c0d8-e560-4f55-bbea-86c59a754ba1. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 704.344655] env[61998]: DEBUG oslo_concurrency.lockutils [req-407d4895-72b5-4a64-8b69-6534dd2d79f8 req-1f86da42-2db4-471c-919f-08ffa38488c6 service nova] Acquiring lock "refresh_cache-a8101e8d-55d0-4f70-9119-f5e176ba8212" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.344655] env[61998]: DEBUG oslo_concurrency.lockutils [req-407d4895-72b5-4a64-8b69-6534dd2d79f8 req-1f86da42-2db4-471c-919f-08ffa38488c6 service nova] Acquired lock "refresh_cache-a8101e8d-55d0-4f70-9119-f5e176ba8212" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.345660] env[61998]: DEBUG nova.network.neutron [req-407d4895-72b5-4a64-8b69-6534dd2d79f8 req-1f86da42-2db4-471c-919f-08ffa38488c6 service nova] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Refreshing network info cache for port 0878c0d8-e560-4f55-bbea-86c59a754ba1 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 704.733663] env[61998]: DEBUG nova.scheduler.client.report [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 704.749926] env[61998]: DEBUG nova.network.neutron [-] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.866635] env[61998]: DEBUG nova.network.neutron [req-407d4895-72b5-4a64-8b69-6534dd2d79f8 req-1f86da42-2db4-471c-919f-08ffa38488c6 service nova] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.963263] env[61998]: DEBUG nova.network.neutron [req-407d4895-72b5-4a64-8b69-6534dd2d79f8 req-1f86da42-2db4-471c-919f-08ffa38488c6 service nova] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.238849] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.964s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.239486] env[61998]: ERROR nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 551ad2d7-873a-4457-8669-12821e3e9793, please check neutron logs for more information. [ 705.239486] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Traceback (most recent call last): [ 705.239486] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 705.239486] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] self.driver.spawn(context, instance, image_meta, [ 705.239486] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 705.239486] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 705.239486] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 705.239486] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] vm_ref = self.build_virtual_machine(instance, [ 705.239486] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 705.239486] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] vif_infos = vmwarevif.get_vif_info(self._session, [ 705.239486] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] for vif in network_info: [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] return self._sync_wrapper(fn, *args, **kwargs) [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/model.py", line 605, 
in _sync_wrapper [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] self.wait() [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] self[:] = self._gt.wait() [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] return self._exit_event.wait() [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] current.throw(*self._exc) [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 705.239852] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] result = function(*args, **kwargs) [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] return func(*args, **kwargs) [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] raise e [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] nwinfo = self.network_api.allocate_for_instance( [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] created_port_ids = self._update_ports_for_instance( [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] with excutils.save_and_reraise_exception(): [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] self.force_reraise() [ 705.240253] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 705.240594] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] raise self.value [ 705.240594] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 705.240594] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] updated_port = self._update_port( [ 705.240594] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 705.240594] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] _ensure_no_port_binding_failure(port) [ 705.240594] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 705.240594] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] raise exception.PortBindingFailed(port_id=port['id']) [ 705.240594] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] nova.exception.PortBindingFailed: Binding failed for port 551ad2d7-873a-4457-8669-12821e3e9793, please check neutron logs for more information. [ 705.240594] env[61998]: ERROR nova.compute.manager [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] [ 705.240594] env[61998]: DEBUG nova.compute.utils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Binding failed for port 551ad2d7-873a-4457-8669-12821e3e9793, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 705.241636] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.834s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.242903] env[61998]: INFO nova.compute.claims [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 705.245628] env[61998]: DEBUG nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Build of instance 297c345a-a825-47b1-a9e4-a353758d32ce was re-scheduled: Binding failed for port 551ad2d7-873a-4457-8669-12821e3e9793, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 705.245900] env[61998]: DEBUG nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 705.246129] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "refresh_cache-297c345a-a825-47b1-a9e4-a353758d32ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.246273] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "refresh_cache-297c345a-a825-47b1-a9e4-a353758d32ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.246440] env[61998]: DEBUG nova.network.neutron [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 705.253388] env[61998]: INFO nova.compute.manager [-] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Took 1.02 seconds to deallocate network for instance. [ 705.254193] env[61998]: DEBUG nova.compute.claims [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 705.254361] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.289839] env[61998]: DEBUG nova.compute.manager [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 705.318993] env[61998]: DEBUG nova.virt.hardware [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 705.319190] env[61998]: DEBUG nova.virt.hardware [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 705.319343] env[61998]: DEBUG nova.virt.hardware [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 705.319522] env[61998]: DEBUG nova.virt.hardware [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 705.319665] env[61998]: DEBUG nova.virt.hardware [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 705.319808] env[61998]: DEBUG nova.virt.hardware [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 705.320294] env[61998]: DEBUG nova.virt.hardware [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 705.320498] env[61998]: DEBUG nova.virt.hardware [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 705.320679] env[61998]: DEBUG nova.virt.hardware [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 
tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 705.320846] env[61998]: DEBUG nova.virt.hardware [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 705.321050] env[61998]: DEBUG nova.virt.hardware [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 705.322230] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebc15d6-e55c-4cd5-b4ea-68458ea38e53 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.330701] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed05659-abff-46e8-a27b-569504a83964 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.344023] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Instance VIF info [] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 705.349888] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Creating folder: Project (54ebb868b19a441b94148236b279fdcc). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 705.350631] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-73c440a0-4b95-446e-ba0e-c3faa78f34d7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.360965] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Created folder: Project (54ebb868b19a441b94148236b279fdcc) in parent group-v294665. [ 705.361289] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Creating folder: Instances. Parent ref: group-v294686. 
{{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 705.361445] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad5869fc-9711-4120-a68a-460474fa2448 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.369944] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Created folder: Instances in parent group-v294686. [ 705.370054] env[61998]: DEBUG oslo.service.loopingcall [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 705.370985] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 705.370985] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b95fddf1-23ef-4b24-ab6b-ba5cd13a84b9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.386055] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 705.386055] env[61998]: value = "task-1388444" [ 705.386055] env[61998]: _type = "Task" [ 705.386055] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.393113] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388444, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.465597] env[61998]: DEBUG oslo_concurrency.lockutils [req-407d4895-72b5-4a64-8b69-6534dd2d79f8 req-1f86da42-2db4-471c-919f-08ffa38488c6 service nova] Releasing lock "refresh_cache-a8101e8d-55d0-4f70-9119-f5e176ba8212" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.466081] env[61998]: DEBUG nova.compute.manager [req-407d4895-72b5-4a64-8b69-6534dd2d79f8 req-1f86da42-2db4-471c-919f-08ffa38488c6 service nova] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Received event network-vif-deleted-0878c0d8-e560-4f55-bbea-86c59a754ba1 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 705.765880] env[61998]: DEBUG nova.network.neutron [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.873766] env[61998]: DEBUG nova.network.neutron [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.895726] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388444, 'name': CreateVM_Task, 'duration_secs': 0.248588} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.895726] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 705.896051] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.896211] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.896546] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 705.897303] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86f89c6f-4506-4b3d-88b5-ee32d0bff92b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.901093] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Waiting for the task: (returnval){ [ 705.901093] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52cffd82-9dd6-a6a0-d88a-2937c1563f1a" [ 705.901093] env[61998]: _type = "Task" [ 705.901093] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.909582] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52cffd82-9dd6-a6a0-d88a-2937c1563f1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.018967] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "5eb786f1-7789-48a0-a04e-a4039e387f58" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.019246] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "5eb786f1-7789-48a0-a04e-a4039e387f58" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.258022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "a7225abb-d8ea-49fc-85da-7791d9dde5bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.258281] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "a7225abb-d8ea-49fc-85da-7791d9dde5bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.376364] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "refresh_cache-297c345a-a825-47b1-a9e4-a353758d32ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.376594] env[61998]: DEBUG nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 706.376789] env[61998]: DEBUG nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 706.376959] env[61998]: DEBUG nova.network.neutron [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 706.391937] env[61998]: DEBUG nova.network.neutron [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.411397] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52cffd82-9dd6-a6a0-d88a-2937c1563f1a, 'name': SearchDatastore_Task, 'duration_secs': 0.010495} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.413640] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.413870] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 706.414108] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.414255] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.414448] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 706.415018] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e15a547-0b2c-4245-923e-c868d1682c5b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.422798] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 706.422973] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 706.425553] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88128b1d-e009-43fb-988a-419a1acd3c1c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.431021] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Waiting for the task: (returnval){ [ 706.431021] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]524dea22-2fd4-f40c-010b-1cd64a483b1b" [ 706.431021] env[61998]: _type = "Task" [ 706.431021] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.438890] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]524dea22-2fd4-f40c-010b-1cd64a483b1b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.567576] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a01bbe-422d-44d4-bf49-52458689acfd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.574865] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a551077-b8e8-4e27-a30d-f0cc8481b667 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.606087] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e527b111-81c8-44b1-8d99-ec9b56f88f73 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.613782] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5061235f-be1a-4c84-bbe1-e0a62ed3f59c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.629156] env[61998]: DEBUG nova.compute.provider_tree [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.894336] env[61998]: DEBUG nova.network.neutron [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.942064] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]524dea22-2fd4-f40c-010b-1cd64a483b1b, 'name': SearchDatastore_Task, 'duration_secs': 0.00793} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.942416] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-082cca22-348f-4341-8dd4-71a9f8c552f8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.948172] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Waiting for the task: (returnval){ [ 706.948172] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52468032-eb49-6b29-6696-e13ab0ff7c76" [ 706.948172] env[61998]: _type = "Task" [ 706.948172] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.956181] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52468032-eb49-6b29-6696-e13ab0ff7c76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.134570] env[61998]: DEBUG nova.scheduler.client.report [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 707.397163] env[61998]: INFO nova.compute.manager [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 297c345a-a825-47b1-a9e4-a353758d32ce] Took 1.02 seconds to deallocate network for instance. [ 707.458921] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52468032-eb49-6b29-6696-e13ab0ff7c76, 'name': SearchDatastore_Task, 'duration_secs': 0.008797} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.460006] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.460006] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] f0a011bb-4939-4384-885c-6ce482875b4e/f0a011bb-4939-4384-885c-6ce482875b4e.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 707.460006] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04f0e911-df4e-45de-8246-1ca64df2c20f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.466208] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Waiting for the task: (returnval){ [ 707.466208] env[61998]: value = "task-1388445" [ 707.466208] env[61998]: _type = "Task" [ 707.466208] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.473742] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388445, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.639325] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.639921] env[61998]: DEBUG nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 707.643271] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.211s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.644690] env[61998]: INFO nova.compute.claims [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.975266] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388445, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483777} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.975583] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] f0a011bb-4939-4384-885c-6ce482875b4e/f0a011bb-4939-4384-885c-6ce482875b4e.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 707.975704] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 707.976248] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-18b9e468-2a33-44a4-b208-464e992a3404 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.982420] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Waiting for the task: (returnval){ [ 707.982420] env[61998]: value = "task-1388446" [ 707.982420] env[61998]: _type = "Task" [ 707.982420] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.990988] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388446, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.148923] env[61998]: DEBUG nova.compute.utils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 708.152914] env[61998]: DEBUG nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 708.153194] env[61998]: DEBUG nova.network.neutron [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 708.204941] env[61998]: DEBUG nova.policy [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '076c558af04945f085eaf90779250652', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3b19c0787ee4cf2806fb4418c34e78a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 708.427732] env[61998]: INFO nova.scheduler.client.report [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleted allocations for instance 297c345a-a825-47b1-a9e4-a353758d32ce [ 708.493422] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388446, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063318} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.493725] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 708.494691] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc80538-6e7a-4636-b953-39fa26817784 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.498791] env[61998]: DEBUG nova.network.neutron [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Successfully created port: b9ef6b1d-c010-4647-9039-78c7f742f0a7 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 708.517104] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] f0a011bb-4939-4384-885c-6ce482875b4e/f0a011bb-4939-4384-885c-6ce482875b4e.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 708.517747] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-173a919d-dfa1-4967-aaa8-8e8a80a623dd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.539252] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Waiting for the task: (returnval){ [ 708.539252] env[61998]: value = "task-1388447" [ 708.539252] env[61998]: _type = "Task" [ 708.539252] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.549328] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388447, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.656517] env[61998]: DEBUG nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 708.937908] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6baf695d-ab6a-40b3-914f-c663ad35c1eb tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "297c345a-a825-47b1-a9e4-a353758d32ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.649s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.045312] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1076da-2b33-4b68-8786-fb621d8bffee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.053372] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388447, 'name': ReconfigVM_Task, 'duration_secs': 0.278707} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.055222] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Reconfigured VM instance instance-00000026 to attach disk [datastore2] f0a011bb-4939-4384-885c-6ce482875b4e/f0a011bb-4939-4384-885c-6ce482875b4e.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 709.055886] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b031f210-22c5-4ecc-b3ab-c3b010b9c08d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.058139] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f528123-ba14-41ce-92c8-7f2b38a461d6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.091680] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c01b67-89f9-4af9-a735-25bfbb9bf2c0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.094361] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Waiting for the task: (returnval){ [ 709.094361] env[61998]: value = "task-1388448" [ 709.094361] env[61998]: _type = "Task" [ 709.094361] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.101387] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99bb74c7-f81e-4e80-a5ed-2d9f23a25e7e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.108011] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388448, 'name': Rename_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.117532] env[61998]: DEBUG nova.compute.provider_tree [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.351693] env[61998]: DEBUG nova.compute.manager [req-24929548-d979-42e8-be65-f8ade6b32571 req-5f99be43-0401-4b41-ab68-fbe4cbb52f23 service nova] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Received event network-changed-b9ef6b1d-c010-4647-9039-78c7f742f0a7 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 709.351875] env[61998]: DEBUG nova.compute.manager [req-24929548-d979-42e8-be65-f8ade6b32571 req-5f99be43-0401-4b41-ab68-fbe4cbb52f23 service nova] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Refreshing instance network info cache due to event network-changed-b9ef6b1d-c010-4647-9039-78c7f742f0a7. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 709.352097] env[61998]: DEBUG oslo_concurrency.lockutils [req-24929548-d979-42e8-be65-f8ade6b32571 req-5f99be43-0401-4b41-ab68-fbe4cbb52f23 service nova] Acquiring lock "refresh_cache-4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.352240] env[61998]: DEBUG oslo_concurrency.lockutils [req-24929548-d979-42e8-be65-f8ade6b32571 req-5f99be43-0401-4b41-ab68-fbe4cbb52f23 service nova] Acquired lock "refresh_cache-4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.352479] env[61998]: DEBUG nova.network.neutron [req-24929548-d979-42e8-be65-f8ade6b32571 req-5f99be43-0401-4b41-ab68-fbe4cbb52f23 service nova] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Refreshing network info cache for port b9ef6b1d-c010-4647-9039-78c7f742f0a7 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 709.443130] env[61998]: DEBUG nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 709.608351] env[61998]: ERROR nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b9ef6b1d-c010-4647-9039-78c7f742f0a7, please check neutron logs for more information. 
[ 709.608351] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 709.608351] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 709.608351] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 709.608351] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.608351] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 709.608351] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.608351] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 709.608351] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.608351] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 709.608351] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.608351] env[61998]: ERROR nova.compute.manager raise self.value [ 709.608351] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.608351] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 709.608351] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.608351] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 709.608815] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.608815] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 709.608815] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b9ef6b1d-c010-4647-9039-78c7f742f0a7, please check neutron logs for more information. 
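The innermost frame, _ensure_no_port_binding_failure, is where this failure surfaces: after Nova asks Neutron to bind the port to the compute host, it inspects the returned port and raises if the binding came back failed. A self-contained sketch of that check; the exception class and the VIF-type constant are reconstructed here for illustration (in Nova they live in nova.exception and nova.network.model):

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron sets on a failed binding

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        """Raise if Neutron reports the port's VIF binding as failed."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # The update that produced the traceback above would look like:
    port = {'id': 'b9ef6b1d-c010-4647-9039-78c7f742f0a7',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as e:
        print(e)

The second traceback that follows is the same exception logged once more, this time by the eventlet hub as the _allocate_network_async greenthread exits.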
[ 709.608815] env[61998]: ERROR nova.compute.manager [ 709.608815] env[61998]: Traceback (most recent call last): [ 709.608815] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 709.608815] env[61998]: listener.cb(fileno) [ 709.608815] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 709.608815] env[61998]: result = function(*args, **kwargs) [ 709.608815] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 709.608815] env[61998]: return func(*args, **kwargs) [ 709.608815] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 709.608815] env[61998]: raise e [ 709.608815] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 709.608815] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 709.608815] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.608815] env[61998]: created_port_ids = self._update_ports_for_instance( [ 709.608815] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.608815] env[61998]: with excutils.save_and_reraise_exception(): [ 709.608815] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.608815] env[61998]: self.force_reraise() [ 709.608815] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.608815] env[61998]: raise self.value [ 709.608815] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.608815] env[61998]: updated_port = self._update_port( [ 709.608815] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.608815] env[61998]: _ensure_no_port_binding_failure(port) [ 709.608815] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.608815] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 709.609579] env[61998]: nova.exception.PortBindingFailed: Binding failed for port b9ef6b1d-c010-4647-9039-78c7f742f0a7, please check neutron logs for more information. [ 709.609579] env[61998]: Removing descriptor: 17 [ 709.611418] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388448, 'name': Rename_Task, 'duration_secs': 0.129414} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.611837] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 709.612524] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7fde7817-201a-4028-aaff-aaa1c25650c6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.621616] env[61998]: DEBUG nova.scheduler.client.report [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 709.623898] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Waiting for the task: (returnval){ [ 709.623898] env[61998]: value = "task-1388449" [ 709.623898] env[61998]: _type = "Task" [ 709.623898] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.632944] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388449, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.668150] env[61998]: DEBUG nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Start spawning the instance on the hypervisor. 
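The "Inventory has not changed for provider ..." record shows the report client comparing the inventory it would publish (VCPU, MEMORY_MB and DISK_GB, each with totals, reservations and allocation ratios) against what it last sent to Placement, and skipping the update when they match. A minimal sketch of that comparison, assuming a plain dict cache keyed by provider UUID (the real code keeps this state in a ProviderTree):

    _cached_inventory = {}  # provider_uuid -> last inventory dict sent to Placement

    def set_inventory_for_provider(provider_uuid, inventory):
        """Push inventory to Placement only when it differs from the cache."""
        if _cached_inventory.get(provider_uuid) == inventory:
            print("Inventory has not changed for provider %s" % provider_uuid)
            return False
        # Real code would PUT /resource_providers/{uuid}/inventories here.
        _cached_inventory[provider_uuid] = inventory
        return True

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }
    uuid = 'c8c34fc8-902a-460e-a93a-a1e887f55ddd'
    set_inventory_for_provider(uuid, inventory)  # True: first report
    set_inventory_for_provider(uuid, inventory)  # False: unchanged, skipped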
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 709.688766] env[61998]: DEBUG nova.virt.hardware [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 709.689045] env[61998]: DEBUG nova.virt.hardware [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 709.689217] env[61998]: DEBUG nova.virt.hardware [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 709.689399] env[61998]: DEBUG nova.virt.hardware [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 709.689539] env[61998]: DEBUG nova.virt.hardware [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 709.689679] env[61998]: DEBUG nova.virt.hardware [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 709.689883] env[61998]: DEBUG nova.virt.hardware [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 709.690132] env[61998]: DEBUG nova.virt.hardware [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 709.690349] env[61998]: DEBUG nova.virt.hardware [None 
req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 709.690551] env[61998]: DEBUG nova.virt.hardware [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 709.690757] env[61998]: DEBUG nova.virt.hardware [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 709.691662] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62dd764a-0eb1-491b-be8c-027be1d66b61 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.699618] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da77289d-69d3-4738-8cbd-b498e73c1afd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.715017] env[61998]: ERROR nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b9ef6b1d-c010-4647-9039-78c7f742f0a7, please check neutron logs for more information. 
[ 709.715017] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Traceback (most recent call last): [ 709.715017] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 709.715017] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] yield resources [ 709.715017] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 709.715017] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] self.driver.spawn(context, instance, image_meta, [ 709.715017] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 709.715017] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 709.715017] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 709.715017] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] vm_ref = self.build_virtual_machine(instance, [ 709.715017] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 709.717574] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] vif_infos = vmwarevif.get_vif_info(self._session, [ 709.717574] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 709.717574] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] for vif in network_info: [ 709.717574] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 709.717574] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] return self._sync_wrapper(fn, *args, **kwargs) [ 709.717574] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 709.717574] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] self.wait() [ 709.717574] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 709.717574] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] self[:] = self._gt.wait() [ 709.717574] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 709.717574] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] return self._exit_event.wait() [ 709.717574] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 709.717574] env[61998]: ERROR 
nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] current.throw(*self._exc) [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] result = function(*args, **kwargs) [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] return func(*args, **kwargs) [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] raise e [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] nwinfo = self.network_api.allocate_for_instance( [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] created_port_ids = self._update_ports_for_instance( [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] with excutils.save_and_reraise_exception(): [ 709.717912] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.718282] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] self.force_reraise() [ 709.718282] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.718282] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] raise self.value [ 709.718282] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.718282] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] updated_port = self._update_port( [ 709.718282] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.718282] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] _ensure_no_port_binding_failure(port) [ 709.718282] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
709.718282] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] raise exception.PortBindingFailed(port_id=port['id']) [ 709.718282] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] nova.exception.PortBindingFailed: Binding failed for port b9ef6b1d-c010-4647-9039-78c7f742f0a7, please check neutron logs for more information. [ 709.718282] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] [ 709.718282] env[61998]: INFO nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Terminating instance [ 709.719430] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Acquiring lock "refresh_cache-4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.871723] env[61998]: DEBUG nova.network.neutron [req-24929548-d979-42e8-be65-f8ade6b32571 req-5f99be43-0401-4b41-ab68-fbe4cbb52f23 service nova] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.964414] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.965398] env[61998]: DEBUG nova.network.neutron [req-24929548-d979-42e8-be65-f8ade6b32571 req-5f99be43-0401-4b41-ab68-fbe4cbb52f23 service nova] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.125609] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.482s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.126167] env[61998]: DEBUG nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Start building networks asynchronously for instance. 
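"Start building networks asynchronously" explains the shape of the traceback above: port allocation runs in an eventlet greenthread while the rest of the build continues, and the spawn path only hits the failure later, when get_vif_info iterates network_info and the wrapper's wait() re-raises the stored PortBindingFailed. A compact sketch of that deferred-failure pattern using eventlet directly; the AsyncNetworkInfo class here is a simplified stand-in for the async wrapper in nova.network.model:

    import eventlet

    class AsyncNetworkInfo:
        """Deferred network_info: iteration blocks on the allocation thread."""
        def __init__(self, allocate, *args):
            self._gt = eventlet.spawn(allocate, *args)
            self._result = None

        def __iter__(self):
            # Exactly as in the traceback: __iter__ -> wait() -> re-raise.
            if self._result is None:
                self._result = self._gt.wait()
            return iter(self._result)

    def allocate_for_instance(port_id):
        # Stand-in for the Neutron allocation that failed above.
        raise RuntimeError("Binding failed for port %s" % port_id)

    network_info = AsyncNetworkInfo(allocate_for_instance,
                                    'b9ef6b1d-c010-4647-9039-78c7f742f0a7')
    # ... flavor/image CPU-topology work proceeds here, as in the log ...
    try:
        for vif in network_info:   # first consumer of the deferred result
            print(vif)
    except RuntimeError as e:
        print("Instance failed to spawn:", e)

This is why the same binding failure appears twice per instance: once when the greenthread dies, and again when spawn finally consumes network_info.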
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 710.128893] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.880s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.140613] env[61998]: DEBUG oslo_vmware.api [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388449, 'name': PowerOnVM_Task, 'duration_secs': 0.51449} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.140851] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 710.141054] env[61998]: INFO nova.compute.manager [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Took 4.85 seconds to spawn the instance on the hypervisor. [ 710.141273] env[61998]: DEBUG nova.compute.manager [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 710.142045] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db207253-05b0-47fe-b034-efb9f9e16a76 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.316051] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "b3232fcd-43b2-4139-afe1-fbe863d0af30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.317106] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "b3232fcd-43b2-4139-afe1-fbe863d0af30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.468566] env[61998]: DEBUG oslo_concurrency.lockutils [req-24929548-d979-42e8-be65-f8ade6b32571 req-5f99be43-0401-4b41-ab68-fbe4cbb52f23 service nova] Releasing lock "refresh_cache-4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.468917] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 
tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Acquired lock "refresh_cache-4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.469184] env[61998]: DEBUG nova.network.neutron [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 710.640611] env[61998]: DEBUG nova.compute.utils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 710.642213] env[61998]: DEBUG nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 710.642404] env[61998]: DEBUG nova.network.neutron [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 710.660605] env[61998]: INFO nova.compute.manager [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Took 24.89 seconds to build instance. [ 710.694316] env[61998]: DEBUG nova.policy [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e44d073b21ab4e7f9a54b57eb8c6b223', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '968bdbf51c464083b56951c00168b830', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 710.998021] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9d4373-3f02-401f-b4da-56ecb1d9e3ed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.002794] env[61998]: DEBUG nova.network.neutron [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Instance cache missing network info. 
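The "Policy check for network:attach_external_network failed with credentials ..." record is oslo.policy at work: before wiring ports, Nova checks whether the requesting token may attach to external networks, and a plain member token fails the admin-oriented rule; the failed check is informational, not an error. A sketch using oslo.policy directly; the 'role:admin' default used here is an assumption for illustration (Nova's actual default check string for this rule is expressed differently):

    from oslo_config import cfg
    from oslo_policy import policy

    CONF = cfg.CONF
    enforcer = policy.Enforcer(CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'user_id': 'e44d073b21ab4e7f9a54b57eb8c6b223',
             'project_id': '968bdbf51c464083b56951c00168b830',
             'roles': ['reader', 'member']}
    target = {'project_id': creds['project_id']}

    # A member-only token fails the admin-only rule, so the build simply
    # proceeds without access to external networks.
    print(enforcer.enforce('network:attach_external_network', target, creds))
    # -> False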
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.010441] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4072da8-726a-4ea1-9b2f-086799f1b5e6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.043017] env[61998]: DEBUG nova.network.neutron [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Successfully created port: d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 711.045454] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5f3996-0331-47cc-9d78-8a3b89ab9258 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.054669] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b1b13b-8131-4464-808e-23e57ab32fe6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.068705] env[61998]: DEBUG nova.compute.provider_tree [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 711.090047] env[61998]: DEBUG nova.network.neutron [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.146805] env[61998]: DEBUG nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 711.162558] env[61998]: DEBUG oslo_concurrency.lockutils [None req-abb1ea3b-a23c-42a9-a222-cff58f232cd3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Lock "f0a011bb-4939-4384-885c-6ce482875b4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.810s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.569360] env[61998]: DEBUG nova.compute.manager [req-6ed808d7-fee7-4525-83ff-cb046195fb18 req-ed554d0c-b672-4320-b53f-52e33d396425 service nova] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Received event network-vif-deleted-b9ef6b1d-c010-4647-9039-78c7f742f0a7 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 711.571885] env[61998]: DEBUG nova.scheduler.client.report [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 711.593680] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Releasing lock "refresh_cache-4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.594108] env[61998]: DEBUG nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Start destroying the instance on the hypervisor. 
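The Acquiring/Acquired/"released" records throughout, with their "waited"/"held" timings (including the 129.810s build lock above), all come from oslo.concurrency's named locks, which measure how long a caller waited for the semaphore and how long it held it. A minimal sketch of guarding the same cache refresh with that primitive; the lock name is copied from the log and the sleep stands in for the real rebuild work:

    import time
    from oslo_concurrency import lockutils

    @lockutils.synchronized('refresh_cache-4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e')
    def refresh_network_cache():
        # Only one greenthread may rebuild this instance's network info
        # cache at a time; the "waited"/"held" debug lines are measured
        # around this critical section.
        time.sleep(0.1)

    refresh_network_cache()

By default the lock is an in-process semaphore; passing external=True would back it with a lock file so separate processes serialize too.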
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 711.594293] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 711.595190] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8ac5158-7e33-4ba5-882e-75f51304edf9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.608236] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beacbc43-4db1-4e5a-bc83-780f9ce2d547 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.634085] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e could not be found. [ 711.634515] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 711.634814] env[61998]: INFO nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 711.635187] env[61998]: DEBUG oslo.service.loopingcall [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 711.635558] env[61998]: DEBUG nova.compute.manager [-] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 711.636890] env[61998]: DEBUG nova.network.neutron [-] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 711.661807] env[61998]: DEBUG nova.network.neutron [-] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.666873] env[61998]: DEBUG nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Starting instance... 
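"Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" is oslo.service's looping-call wrapper: network deallocation is retried on a fixed interval until the wrapped function signals completion by raising LoopingCallDone. A simplified sketch of that pattern; the flaky Neutron call and the three-attempt budget are illustrative:

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        attempts['n'] += 1
        try:
            # Stand-in for the Neutron deallocation; succeed on the third try.
            if attempts['n'] < 3:
                raise IOError("neutron temporarily unavailable")
        except IOError:
            return  # fall through: the looping call fires again after the interval
        raise loopingcall.LoopingCallDone(True)  # stop looping, return True

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    print(timer.start(interval=0.1).wait())  # -> True after 3 attempts

Note also the InstanceNotFound warning just above: destroy tolerates a VM that never materialized on the backend, so cleanup still runs for an instance whose spawn failed before the VM was created.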
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 711.877959] env[61998]: DEBUG nova.compute.manager [None req-10130356-9a52-410e-892e-99a0862f3852 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 711.879214] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75b024c-09fa-4cd8-9047-f8ca5af92e69 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.077495] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.948s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.078167] env[61998]: ERROR nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e91bcb9a-9c45-437f-9f97-6497434ed3f3, please check neutron logs for more information. [ 712.078167] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Traceback (most recent call last): [ 712.078167] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 712.078167] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] self.driver.spawn(context, instance, image_meta, [ 712.078167] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 712.078167] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 712.078167] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 712.078167] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] vm_ref = self.build_virtual_machine(instance, [ 712.078167] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 712.078167] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] vif_infos = vmwarevif.get_vif_info(self._session, [ 712.078167] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 712.078592] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] for vif in network_info: [ 712.078592] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 712.078592] env[61998]: ERROR 
nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] return self._sync_wrapper(fn, *args, **kwargs) [ 712.078592] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 712.078592] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] self.wait() [ 712.078592] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 712.078592] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] self[:] = self._gt.wait() [ 712.078592] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 712.078592] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] return self._exit_event.wait() [ 712.078592] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 712.078592] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] current.throw(*self._exc) [ 712.078592] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 712.078592] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] result = function(*args, **kwargs) [ 712.079032] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 712.079032] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] return func(*args, **kwargs) [ 712.079032] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 712.079032] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] raise e [ 712.079032] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 712.079032] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] nwinfo = self.network_api.allocate_for_instance( [ 712.079032] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 712.079032] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] created_port_ids = self._update_ports_for_instance( [ 712.079032] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 712.079032] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] with excutils.save_and_reraise_exception(): [ 712.079032] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.079032] env[61998]: ERROR 
nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] self.force_reraise() [ 712.079032] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.079462] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] raise self.value [ 712.079462] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 712.079462] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] updated_port = self._update_port( [ 712.079462] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.079462] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] _ensure_no_port_binding_failure(port) [ 712.079462] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.079462] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] raise exception.PortBindingFailed(port_id=port['id']) [ 712.079462] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] nova.exception.PortBindingFailed: Binding failed for port e91bcb9a-9c45-437f-9f97-6497434ed3f3, please check neutron logs for more information. [ 712.079462] env[61998]: ERROR nova.compute.manager [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] [ 712.079462] env[61998]: DEBUG nova.compute.utils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Binding failed for port e91bcb9a-9c45-437f-9f97-6497434ed3f3, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 712.080090] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.209s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.082083] env[61998]: INFO nova.compute.claims [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 712.084765] env[61998]: DEBUG nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Build of instance 35b6490b-eec9-4dc1-9de3-63c368bdc5d7 was re-scheduled: Binding failed for port e91bcb9a-9c45-437f-9f97-6497434ed3f3, please check neutron logs for more information. 
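"Build of instance ... was re-scheduled" shows how the build loop reacts to a retriable failure: the resource claim is aborted (the abort_instance_claim lock held 1.948s above), VIFs are unplugged and networks cleaned up, and the request is handed back for another attempt rather than failed outright. A schematic sketch of that decision; BuildResult and the function shape are simplifications of Nova's actual control flow, not its API:

    from enum import Enum

    class PortBindingFailed(Exception):
        """A retriable build failure (see the binding check sketched earlier)."""

    class BuildResult(Enum):
        ACTIVE = 1        # build succeeded
        FAILED = 2        # fatal: do not retry
        RESCHEDULED = 3   # retriable: hand back to the conductor for another host

    def do_build_and_run_instance(build_fn):
        try:
            build_fn()
            return BuildResult.ACTIVE
        except PortBindingFailed as e:
            # Matches the log: abort the claim, unplug VIFs, then re-schedule.
            print("Build was re-scheduled:", e)
            return BuildResult.RESCHEDULED

    def failing_build():
        raise PortBindingFailed(
            "Binding failed for port e91bcb9a-9c45-437f-9f97-6497434ed3f3")

    print(do_build_and_run_instance(failing_build))  # BuildResult.RESCHEDULED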
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 712.085164] env[61998]: DEBUG nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 712.085389] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Acquiring lock "refresh_cache-35b6490b-eec9-4dc1-9de3-63c368bdc5d7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.085529] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Acquired lock "refresh_cache-35b6490b-eec9-4dc1-9de3-63c368bdc5d7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.085684] env[61998]: DEBUG nova.network.neutron [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 712.114902] env[61998]: ERROR nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada, please check neutron logs for more information. 
[ 712.114902] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 712.114902] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 712.114902] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 712.114902] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 712.114902] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 712.114902] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 712.114902] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 712.114902] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.114902] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 712.114902] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.114902] env[61998]: ERROR nova.compute.manager raise self.value [ 712.114902] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 712.114902] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 712.114902] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.114902] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 712.115963] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.115963] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 712.115963] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada, please check neutron logs for more information. 
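The force_reraise frames in every one of these tracebacks come from excutils.save_and_reraise_exception, the oslo.utils guard that lets cleanup run inside an except block without masking the original error: on exit it re-raises the saved exception. A small runnable example of the pattern; the rollback print stands in for the real port cleanup:

    from oslo_utils import excutils

    def update_ports_for_instance(ports):
        created = []
        try:
            for port in ports:
                if port.get('binding:vif_type') == 'binding_failed':
                    raise RuntimeError("Binding failed for port %s" % port['id'])
                created.append(port['id'])
        except Exception:
            with excutils.save_and_reraise_exception():
                # Clean up without masking the original error: even if this
                # block itself raised, the saved exception would still win.
                for port_id in created:
                    print("rolling back port", port_id)

    ports = [{'id': 'ok-port'},
             {'id': 'd13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada',
              'binding:vif_type': 'binding_failed'}]
    try:
        update_ports_for_instance(ports)
    except RuntimeError as e:
        print("re-raised:", e)

The duplicate traceback that follows is the same failure logged again by the greenthread hook, exactly as with port b9ef6b1d earlier.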
[ 712.115963] env[61998]: ERROR nova.compute.manager [ 712.115963] env[61998]: Traceback (most recent call last): [ 712.115963] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 712.115963] env[61998]: listener.cb(fileno) [ 712.115963] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 712.115963] env[61998]: result = function(*args, **kwargs) [ 712.115963] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 712.115963] env[61998]: return func(*args, **kwargs) [ 712.115963] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 712.115963] env[61998]: raise e [ 712.115963] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 712.115963] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 712.115963] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 712.115963] env[61998]: created_port_ids = self._update_ports_for_instance( [ 712.115963] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 712.115963] env[61998]: with excutils.save_and_reraise_exception(): [ 712.115963] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.115963] env[61998]: self.force_reraise() [ 712.115963] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.115963] env[61998]: raise self.value [ 712.115963] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 712.115963] env[61998]: updated_port = self._update_port( [ 712.115963] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.115963] env[61998]: _ensure_no_port_binding_failure(port) [ 712.115963] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.115963] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 712.117228] env[61998]: nova.exception.PortBindingFailed: Binding failed for port d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada, please check neutron logs for more information. 
[ 712.117228] env[61998]: Removing descriptor: 17
[ 712.144035] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Acquiring lock "f0a011bb-4939-4384-885c-6ce482875b4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 712.144035] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Lock "f0a011bb-4939-4384-885c-6ce482875b4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 712.144035] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Acquiring lock "f0a011bb-4939-4384-885c-6ce482875b4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 712.144035] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Lock "f0a011bb-4939-4384-885c-6ce482875b4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 712.144347] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Lock "f0a011bb-4939-4384-885c-6ce482875b4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 712.145467] env[61998]: INFO nova.compute.manager [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Terminating instance
[ 712.147426] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Acquiring lock "refresh_cache-f0a011bb-4939-4384-885c-6ce482875b4e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 712.147698] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Acquired lock "refresh_cache-f0a011bb-4939-4384-885c-6ce482875b4e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 712.147764] env[61998]: DEBUG nova.network.neutron [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 712.156997] env[61998]: DEBUG nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}}
[ 712.162349] env[61998]: DEBUG nova.network.neutron [-] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 712.182408] env[61998]: DEBUG nova.virt.hardware [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 712.182649] env[61998]: DEBUG nova.virt.hardware [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 712.182804] env[61998]: DEBUG nova.virt.hardware [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 712.183172] env[61998]: DEBUG nova.virt.hardware [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 712.183172] env[61998]: DEBUG nova.virt.hardware [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 712.183292] env[61998]: DEBUG nova.virt.hardware [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 712.183473] env[61998]: DEBUG nova.virt.hardware [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 712.183672] env[61998]: DEBUG nova.virt.hardware [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 712.183789] env[61998]: DEBUG nova.virt.hardware [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 712.183993] env[61998]: DEBUG nova.virt.hardware [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 712.184126] env[61998]: DEBUG nova.virt.hardware [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 712.185246] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6255fb3c-0d7e-4ad3-9130-bfec5b633205 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 712.189278] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 712.193350] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0864d208-066e-4777-b0a8-63ea5dde92f1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 712.207218] env[61998]: ERROR nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada, please check neutron logs for more information.
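The nova.virt.hardware records above show the CPU topology search for the 1-vCPU m1.nano flavor: with no flavor or image preferences (0:0:0) and effectively unlimited maxima, the only factorization of one vCPU is sockets=1, cores=1, threads=1. A rough sketch of that enumeration step (illustrative only; Nova's _get_possible_cpu_topologies applies more constraints than this):

    import itertools

    def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
        # Enumerate (sockets, cores, threads) triples whose product equals
        # vcpus, mirroring the "Build topologies for N vcpu(s)" step above.
        return [(s, c, t)
                for s, c, t in itertools.product(range(1, max_sockets + 1),
                                                 range(1, max_cores + 1),
                                                 range(1, max_threads + 1))
                if s * c * t == vcpus]

    # Small maxima stand in for the 65536 limits logged above.
    print(possible_cpu_topologies(1, 8, 8, 2))   # [(1, 1, 1)]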
[ 712.207218] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Traceback (most recent call last):
[ 712.207218] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources
[ 712.207218] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] yield resources
[ 712.207218] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 712.207218] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] self.driver.spawn(context, instance, image_meta,
[ 712.207218] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 712.207218] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 712.207218] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 712.207218] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] vm_ref = self.build_virtual_machine(instance,
[ 712.207218] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] vif_infos = vmwarevif.get_vif_info(self._session,
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] for vif in network_info:
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] return self._sync_wrapper(fn, *args, **kwargs)
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] self.wait()
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] self[:] = self._gt.wait()
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] return self._exit_event.wait()
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 712.207595] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] current.throw(*self._exc)
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] result = function(*args, **kwargs)
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] return func(*args, **kwargs)
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] raise e
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] nwinfo = self.network_api.allocate_for_instance(
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] created_port_ids = self._update_ports_for_instance(
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] with excutils.save_and_reraise_exception():
[ 712.208105] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 712.208608] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] self.force_reraise()
[ 712.208608] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 712.208608] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] raise self.value
[ 712.208608] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 712.208608] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] updated_port = self._update_port(
[ 712.208608] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 712.208608] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] _ensure_no_port_binding_failure(port)
[ 712.208608] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 712.208608] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] raise exception.PortBindingFailed(port_id=port['id'])
[ 712.208608] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] nova.exception.PortBindingFailed: Binding failed for port d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada, please check neutron logs for more information.
[ 712.208608] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166]
[ 712.208608] env[61998]: INFO nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Terminating instance
[ 712.209840] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Acquiring lock "refresh_cache-f163fb1b-400f-4abb-8df6-0d9ea6449166" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 712.209991] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Acquired lock "refresh_cache-f163fb1b-400f-4abb-8df6-0d9ea6449166" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 712.210197] env[61998]: DEBUG nova.network.neutron [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 712.391191] env[61998]: INFO nova.compute.manager [None req-10130356-9a52-410e-892e-99a0862f3852 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] instance snapshotting
[ 712.391794] env[61998]: DEBUG nova.objects.instance [None req-10130356-9a52-410e-892e-99a0862f3852 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Lazy-loading 'flavor' on Instance uuid f0a011bb-4939-4384-885c-6ce482875b4e {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 712.606404] env[61998]: DEBUG nova.network.neutron [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 712.663448] env[61998]: DEBUG nova.network.neutron [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 712.664956] env[61998]: INFO nova.compute.manager [-] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Took 1.03 seconds to deallocate network for instance.
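The Acquiring/Acquired/Releasing records around "refresh_cache-<uuid>" come from oslo.concurrency, which Nova uses to serialize network-info-cache refreshes per instance. Generic usage of that lock pattern looks roughly like this (the function body is a placeholder; only the lock-name pattern is taken from the log):

    from oslo_concurrency import lockutils

    def refresh_network_info_cache(uuid):
        print('refreshing cache for', uuid)   # stand-in for the real refresh

    instance_uuid = 'f163fb1b-400f-4abb-8df6-0d9ea6449166'
    # lockutils emits the Acquiring/Acquired/Releasing DEBUG lines seen above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        refresh_network_info_cache(instance_uuid)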
[ 712.666764] env[61998]: DEBUG nova.compute.claims [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 712.666932] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.672910] env[61998]: DEBUG nova.network.neutron [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.718888] env[61998]: DEBUG nova.network.neutron [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.732139] env[61998]: DEBUG nova.network.neutron [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.812369] env[61998]: DEBUG nova.network.neutron [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.898242] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29466160-e9f1-4e97-916e-c6c304e56758 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.917397] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b58cb25-abde-48a4-a8ae-2d117d01c114 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.167337] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Releasing lock "refresh_cache-35b6490b-eec9-4dc1-9de3-63c368bdc5d7" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.167626] env[61998]: DEBUG nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 713.167745] env[61998]: DEBUG nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 713.167928] env[61998]: DEBUG nova.network.neutron [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 713.184908] env[61998]: DEBUG nova.network.neutron [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.223639] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Releasing lock "refresh_cache-f0a011bb-4939-4384-885c-6ce482875b4e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.223639] env[61998]: DEBUG nova.compute.manager [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 713.223639] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 713.224679] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e47a141-c4b3-4dfc-8d26-a38e5a6936b8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.233238] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 713.233504] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcca3f26-9ccb-4ae5-ac51-65e129a850f4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.241712] env[61998]: DEBUG oslo_vmware.api [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Waiting for the task: (returnval){ [ 713.241712] env[61998]: value = "task-1388450" [ 713.241712] env[61998]: _type = "Task" [ 713.241712] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.252523] env[61998]: DEBUG oslo_vmware.api [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388450, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.314789] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Releasing lock "refresh_cache-f163fb1b-400f-4abb-8df6-0d9ea6449166" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.315262] env[61998]: DEBUG nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 713.315477] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 713.316726] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e9fae53-377f-403a-9d0f-d59c1a5d59ac {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.324854] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3679ae77-f3c4-434b-83ff-7e784899576a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.351839] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f163fb1b-400f-4abb-8df6-0d9ea6449166 could not be found. [ 713.352080] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 713.352265] env[61998]: INFO nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Took 0.04 seconds to destroy the instance on the hypervisor. [ 713.352595] env[61998]: DEBUG oslo.service.loopingcall [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 713.355063] env[61998]: DEBUG nova.compute.manager [-] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 713.355177] env[61998]: DEBUG nova.network.neutron [-] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 713.429019] env[61998]: DEBUG nova.compute.manager [None req-10130356-9a52-410e-892e-99a0862f3852 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Instance disappeared during snapshot {{(pid=61998) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4550}} [ 713.432767] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129f8c77-b876-496a-bced-d5381b326605 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.440437] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa479e7e-31f2-489b-a493-ba027a76bda4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.476603] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb861ad-17ce-410a-9377-0aa71c4b38d6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.484357] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65646f7-e7c1-4621-9254-6d6d396cac94 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.497544] env[61998]: DEBUG nova.compute.provider_tree [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.516953] env[61998]: DEBUG nova.network.neutron [-] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.587502] env[61998]: DEBUG nova.compute.manager [None req-10130356-9a52-410e-892e-99a0862f3852 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Found 0 images (rotation: 2) {{(pid=61998) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4853}} [ 713.590746] env[61998]: DEBUG nova.compute.manager [req-17691013-8d06-4111-bf6f-66b06431d5a0 req-e706745a-be3d-450e-b76b-c7487ced1f56 service nova] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Received event network-changed-d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 713.590904] env[61998]: DEBUG nova.compute.manager [req-17691013-8d06-4111-bf6f-66b06431d5a0 req-e706745a-be3d-450e-b76b-c7487ced1f56 service nova] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Refreshing instance network info cache due to event network-changed-d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 713.591148] env[61998]: DEBUG oslo_concurrency.lockutils [req-17691013-8d06-4111-bf6f-66b06431d5a0 req-e706745a-be3d-450e-b76b-c7487ced1f56 service nova] Acquiring lock "refresh_cache-f163fb1b-400f-4abb-8df6-0d9ea6449166" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.591356] env[61998]: DEBUG oslo_concurrency.lockutils [req-17691013-8d06-4111-bf6f-66b06431d5a0 req-e706745a-be3d-450e-b76b-c7487ced1f56 service nova] Acquired lock "refresh_cache-f163fb1b-400f-4abb-8df6-0d9ea6449166" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.591516] env[61998]: DEBUG nova.network.neutron [req-17691013-8d06-4111-bf6f-66b06431d5a0 req-e706745a-be3d-450e-b76b-c7487ced1f56 service nova] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Refreshing network info cache for port d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 713.687095] env[61998]: DEBUG nova.network.neutron [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.751964] env[61998]: DEBUG oslo_vmware.api [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388450, 'name': PowerOffVM_Task, 'duration_secs': 0.120475} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.752311] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 713.752522] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 713.752793] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54010789-f573-44fb-aaf0-594624e89a40 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.778640] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 713.778763] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 713.778876] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Deleting the datastore file [datastore2] f0a011bb-4939-4384-885c-6ce482875b4e {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 713.779146] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-49ed402b-d7fa-482d-8ddf-3f7f549762d7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.785455] env[61998]: DEBUG oslo_vmware.api [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Waiting for the task: (returnval){ [ 713.785455] env[61998]: value = "task-1388452" [ 713.785455] env[61998]: _type = "Task" [ 713.785455] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.793197] env[61998]: DEBUG oslo_vmware.api [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388452, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.001997] env[61998]: DEBUG nova.scheduler.client.report [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 714.020278] env[61998]: DEBUG nova.network.neutron [-] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.109014] env[61998]: DEBUG nova.network.neutron [req-17691013-8d06-4111-bf6f-66b06431d5a0 req-e706745a-be3d-450e-b76b-c7487ced1f56 service nova] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.189483] env[61998]: INFO nova.compute.manager [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] [instance: 35b6490b-eec9-4dc1-9de3-63c368bdc5d7] Took 1.02 seconds to deallocate network for instance. [ 714.196268] env[61998]: DEBUG nova.network.neutron [req-17691013-8d06-4111-bf6f-66b06431d5a0 req-e706745a-be3d-450e-b76b-c7487ced1f56 service nova] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.296022] env[61998]: DEBUG oslo_vmware.api [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Task: {'id': task-1388452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093203} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.296262] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 714.296455] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 714.296624] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 714.296790] env[61998]: INFO nova.compute.manager [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Took 1.07 seconds to destroy the instance on the hypervisor. [ 714.297040] env[61998]: DEBUG oslo.service.loopingcall [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.297235] env[61998]: DEBUG nova.compute.manager [-] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 714.297338] env[61998]: DEBUG nova.network.neutron [-] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 714.312784] env[61998]: DEBUG nova.network.neutron [-] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.506665] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.507045] env[61998]: DEBUG nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 714.509864] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.764s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.511556] env[61998]: INFO nova.compute.claims [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.522673] env[61998]: INFO nova.compute.manager [-] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Took 1.17 seconds to deallocate network for instance. [ 714.524979] env[61998]: DEBUG nova.compute.claims [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 714.524979] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.698604] env[61998]: DEBUG oslo_concurrency.lockutils [req-17691013-8d06-4111-bf6f-66b06431d5a0 req-e706745a-be3d-450e-b76b-c7487ced1f56 service nova] Releasing lock "refresh_cache-f163fb1b-400f-4abb-8df6-0d9ea6449166" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.698788] env[61998]: DEBUG nova.compute.manager [req-17691013-8d06-4111-bf6f-66b06431d5a0 req-e706745a-be3d-450e-b76b-c7487ced1f56 service nova] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Received event network-vif-deleted-d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 714.815996] env[61998]: DEBUG nova.network.neutron [-] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.016615] env[61998]: DEBUG nova.compute.utils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 715.020556] env[61998]: DEBUG nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 715.020745] env[61998]: DEBUG nova.network.neutron [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 715.068088] env[61998]: DEBUG nova.policy [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a321dcae5838472b85cad4facdf7d150', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65a8f5d7bcf841e6a12dc958d9ca3313', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 715.226101] env[61998]: INFO nova.scheduler.client.report [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Deleted allocations for instance 35b6490b-eec9-4dc1-9de3-63c368bdc5d7 [ 715.318486] env[61998]: INFO nova.compute.manager [-] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Took 1.02 seconds to deallocate network for instance. [ 715.430514] env[61998]: DEBUG nova.network.neutron [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Successfully created port: 12a0f3b3-5d1a-4696-91f3-13095628d816 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 715.521565] env[61998]: DEBUG nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 715.736369] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0dd4ad7f-9599-4862-b039-c4f1333967b3 tempest-TenantUsagesTestJSON-1750163327 tempest-TenantUsagesTestJSON-1750163327-project-member] Lock "35b6490b-eec9-4dc1-9de3-63c368bdc5d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.269s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.824733] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.905263] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbbd8a60-a467-4b28-bb08-450b5ba32573 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.913867] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84456031-8994-4003-8e02-b5ad93f1f305 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.945542] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d4db18-c5f0-4fcc-aa07-99dcd44e4fa7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.954890] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0be1f4-9a24-4093-8360-9ff0e29a5075 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.971919] env[61998]: DEBUG nova.compute.provider_tree [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.029579] env[61998]: INFO nova.virt.block_device [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Booting with volume f17ef207-7606-4ed1-9067-d506ba086d15 at /dev/sda [ 716.086366] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f152451c-ed12-4b7e-b18a-5ecf718d259c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.096390] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbef6a70-54ac-41f7-88cc-68bde97148b0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.122342] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f187062-1a00-40e6-8cb9-3acb22ed26a0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
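The PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task records earlier, together with their "Waiting for the task" / "progress is 0%" / "completed successfully" follow-ups, are oslo.vmware's invoke-then-poll pattern, and the many "Invoking PropertyCollector.RetrievePropertiesEx" lines are individual SOAP calls through the same session. A hedged sketch of that pattern (host and credentials are placeholders; vm_ref must come from a prior managed-object lookup):

    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',          # placeholder endpoint
        api_retry_count=3, task_poll_interval=0.5)

    def power_off(session, vm_ref):
        # invoke_api() produces an "Invoking <ManagedObject>.<method>" DEBUG
        # line; wait_for_task() polls TaskInfo and logs the progress lines.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)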
[ 716.130437] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bcd3504-ed13-4331-87a9-f55434991229 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.157541] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d546b85b-23fd-440e-93d0-9141cd66c6a7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.164776] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c819001-df05-4c49-8c12-cb0b8189d02c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.181690] env[61998]: DEBUG nova.virt.block_device [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Updating existing volume attachment record: 407e143a-08b2-4fc4-825d-2b773921327e {{(pid=61998) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 716.242023] env[61998]: DEBUG nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 716.428545] env[61998]: DEBUG nova.compute.manager [req-b82b4b39-e26a-4bef-b447-20802a36b939 req-66ae1680-d6de-4e5f-9ebd-ba32c0611bbb service nova] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Received event network-changed-12a0f3b3-5d1a-4696-91f3-13095628d816 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 716.428774] env[61998]: DEBUG nova.compute.manager [req-b82b4b39-e26a-4bef-b447-20802a36b939 req-66ae1680-d6de-4e5f-9ebd-ba32c0611bbb service nova] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Refreshing instance network info cache due to event network-changed-12a0f3b3-5d1a-4696-91f3-13095628d816. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 716.428986] env[61998]: DEBUG oslo_concurrency.lockutils [req-b82b4b39-e26a-4bef-b447-20802a36b939 req-66ae1680-d6de-4e5f-9ebd-ba32c0611bbb service nova] Acquiring lock "refresh_cache-9025d114-10da-4cf8-9e5f-2520bfd3b246" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.429140] env[61998]: DEBUG oslo_concurrency.lockutils [req-b82b4b39-e26a-4bef-b447-20802a36b939 req-66ae1680-d6de-4e5f-9ebd-ba32c0611bbb service nova] Acquired lock "refresh_cache-9025d114-10da-4cf8-9e5f-2520bfd3b246" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.429293] env[61998]: DEBUG nova.network.neutron [req-b82b4b39-e26a-4bef-b447-20802a36b939 req-66ae1680-d6de-4e5f-9ebd-ba32c0611bbb service nova] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Refreshing network info cache for port 12a0f3b3-5d1a-4696-91f3-13095628d816 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.475139] env[61998]: DEBUG nova.scheduler.client.report [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 716.626829] env[61998]: ERROR nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 12a0f3b3-5d1a-4696-91f3-13095628d816, please check neutron logs for more information. 
[ 716.626829] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 716.626829] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 716.626829] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 716.626829] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 716.626829] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 716.626829] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 716.626829] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 716.626829] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 716.626829] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 716.626829] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 716.626829] env[61998]: ERROR nova.compute.manager raise self.value [ 716.626829] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 716.626829] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 716.626829] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 716.626829] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 716.627522] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 716.627522] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 716.627522] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 12a0f3b3-5d1a-4696-91f3-13095628d816, please check neutron logs for more information. 
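When a build dies with PortBindingFailed, as here for port 12a0f3b3-5d1a-4696-91f3-13095628d816, the error message points at Neutron. One way to inspect the port's binding state is via openstacksdk (the cloud profile name is an assumption; the port id is taken from the log):

    import openstack

    conn = openstack.connect(cloud='devstack-admin')  # assumed clouds.yaml entry
    port = conn.network.get_port('12a0f3b3-5d1a-4696-91f3-13095628d816')
    # A failed binding surfaces as binding_vif_type == 'binding_failed'; the
    # neutron-server and agent logs say which mechanism driver refused it.
    print(port.status, port.binding_vif_type, port.binding_host_id)

Note that ports from failed builds are often gone by the time you look: the cleanup path deletes them, as the network-vif-deleted events in this log show.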
[ 716.627522] env[61998]: ERROR nova.compute.manager [ 716.627522] env[61998]: Traceback (most recent call last): [ 716.627522] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 716.627522] env[61998]: listener.cb(fileno) [ 716.627522] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 716.627522] env[61998]: result = function(*args, **kwargs) [ 716.627522] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 716.627522] env[61998]: return func(*args, **kwargs) [ 716.627522] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 716.627522] env[61998]: raise e [ 716.627522] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 716.627522] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 716.627522] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 716.627522] env[61998]: created_port_ids = self._update_ports_for_instance( [ 716.627522] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 716.627522] env[61998]: with excutils.save_and_reraise_exception(): [ 716.627522] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 716.627522] env[61998]: self.force_reraise() [ 716.627522] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 716.627522] env[61998]: raise self.value [ 716.627522] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 716.627522] env[61998]: updated_port = self._update_port( [ 716.627522] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 716.627522] env[61998]: _ensure_no_port_binding_failure(port) [ 716.627522] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 716.627522] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 716.628402] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 12a0f3b3-5d1a-4696-91f3-13095628d816, please check neutron logs for more information. [ 716.628402] env[61998]: Removing descriptor: 17 [ 716.766132] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.959274] env[61998]: DEBUG nova.network.neutron [req-b82b4b39-e26a-4bef-b447-20802a36b939 req-66ae1680-d6de-4e5f-9ebd-ba32c0611bbb service nova] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.979584] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.980637] env[61998]: DEBUG nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 716.984062] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.162s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.984495] env[61998]: INFO nova.compute.claims [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.062131] env[61998]: DEBUG nova.network.neutron [req-b82b4b39-e26a-4bef-b447-20802a36b939 req-66ae1680-d6de-4e5f-9ebd-ba32c0611bbb service nova] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.489985] env[61998]: DEBUG nova.compute.utils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 717.498492] env[61998]: DEBUG nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 717.498681] env[61998]: DEBUG nova.network.neutron [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 717.556161] env[61998]: DEBUG nova.policy [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35ebccc36a99496d97bd8c1247f55dac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56878d75e1654be4ab2d39f289e82e52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 717.565137] env[61998]: DEBUG oslo_concurrency.lockutils [req-b82b4b39-e26a-4bef-b447-20802a36b939 req-66ae1680-d6de-4e5f-9ebd-ba32c0611bbb service nova] Releasing lock "refresh_cache-9025d114-10da-4cf8-9e5f-2520bfd3b246" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.826177] env[61998]: DEBUG nova.network.neutron [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Successfully created port: 96270278-0740-4f05-9455-036a4f52f677 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 718.002148] env[61998]: DEBUG nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 718.309657] env[61998]: DEBUG nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 718.310853] env[61998]: DEBUG nova.virt.hardware [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 718.311130] env[61998]: DEBUG nova.virt.hardware [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 718.311290] env[61998]: DEBUG nova.virt.hardware [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 718.311505] env[61998]: DEBUG nova.virt.hardware [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 718.311650] env[61998]: DEBUG nova.virt.hardware [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 718.311792] env[61998]: DEBUG nova.virt.hardware [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 718.311997] env[61998]: DEBUG nova.virt.hardware [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 718.312186] env[61998]: DEBUG nova.virt.hardware [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 718.312377] env[61998]: DEBUG nova.virt.hardware [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Got 1 possible 
topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 718.312543] env[61998]: DEBUG nova.virt.hardware [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 718.312712] env[61998]: DEBUG nova.virt.hardware [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 718.314104] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93509a3f-41fd-4b0b-a641-7cdc0ed425c2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.322663] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b63b1f-b5ae-440d-a398-f2928892ff3e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.342510] env[61998]: ERROR nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 12a0f3b3-5d1a-4696-91f3-13095628d816, please check neutron logs for more information. 
[ 718.342510] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Traceback (most recent call last): [ 718.342510] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 718.342510] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] yield resources [ 718.342510] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 718.342510] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] self.driver.spawn(context, instance, image_meta, [ 718.342510] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 718.342510] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] self._vmops.spawn(context, instance, image_meta, injected_files, [ 718.342510] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 718.342510] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] vm_ref = self.build_virtual_machine(instance, [ 718.342510] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 718.342987] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] vif_infos = vmwarevif.get_vif_info(self._session, [ 718.342987] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 718.342987] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] for vif in network_info: [ 718.342987] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 718.342987] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] return self._sync_wrapper(fn, *args, **kwargs) [ 718.342987] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 718.342987] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] self.wait() [ 718.342987] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 718.342987] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] self[:] = self._gt.wait() [ 718.342987] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 718.342987] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] return self._exit_event.wait() [ 718.342987] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 718.342987] env[61998]: ERROR 
nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] current.throw(*self._exc) [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] result = function(*args, **kwargs) [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] return func(*args, **kwargs) [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] raise e [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] nwinfo = self.network_api.allocate_for_instance( [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] created_port_ids = self._update_ports_for_instance( [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] with excutils.save_and_reraise_exception(): [ 718.343437] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 718.343817] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] self.force_reraise() [ 718.343817] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 718.343817] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] raise self.value [ 718.343817] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 718.343817] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] updated_port = self._update_port( [ 718.343817] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 718.343817] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] _ensure_no_port_binding_failure(port) [ 718.343817] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
718.343817] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] raise exception.PortBindingFailed(port_id=port['id']) [ 718.343817] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] nova.exception.PortBindingFailed: Binding failed for port 12a0f3b3-5d1a-4696-91f3-13095628d816, please check neutron logs for more information. [ 718.343817] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] [ 718.343817] env[61998]: INFO nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Terminating instance [ 718.345697] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Acquiring lock "refresh_cache-9025d114-10da-4cf8-9e5f-2520bfd3b246" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.345697] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Acquired lock "refresh_cache-9025d114-10da-4cf8-9e5f-2520bfd3b246" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.345861] env[61998]: DEBUG nova.network.neutron [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 718.395245] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61227a9c-e2c3-4531-8741-1580f958069c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.405058] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a472b54b-11e2-4a8f-8b22-fd2e979b82ba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.437094] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0335ee9e-a0a2-43c3-9b27-48ec619a3dbc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.444880] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8f4087-b305-4810-9f55-1bb6ee9dab1e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.459986] env[61998]: DEBUG nova.compute.provider_tree [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.539483] env[61998]: DEBUG nova.compute.manager [req-78e84a41-d6a3-43e3-9356-a13675ee074b req-3dbbca23-a9e9-499a-8514-60e5c62c3840 service nova] [instance: 
9025d114-10da-4cf8-9e5f-2520bfd3b246] Received event network-vif-deleted-12a0f3b3-5d1a-4696-91f3-13095628d816 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 718.876571] env[61998]: DEBUG nova.network.neutron [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.892237] env[61998]: ERROR nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 96270278-0740-4f05-9455-036a4f52f677, please check neutron logs for more information. [ 718.892237] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 718.892237] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 718.892237] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 718.892237] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 718.892237] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 718.892237] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 718.892237] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 718.892237] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 718.892237] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 718.892237] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 718.892237] env[61998]: ERROR nova.compute.manager raise self.value [ 718.892237] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 718.892237] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 718.892237] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 718.892237] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 718.892743] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 718.892743] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 718.892743] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 96270278-0740-4f05-9455-036a4f52f677, please check neutron logs for more information. 
[ 718.892743] env[61998]: ERROR nova.compute.manager [ 718.892743] env[61998]: Traceback (most recent call last): [ 718.892743] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 718.892743] env[61998]: listener.cb(fileno) [ 718.892743] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 718.892743] env[61998]: result = function(*args, **kwargs) [ 718.892743] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 718.892743] env[61998]: return func(*args, **kwargs) [ 718.892743] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 718.892743] env[61998]: raise e [ 718.892743] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 718.892743] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 718.892743] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 718.892743] env[61998]: created_port_ids = self._update_ports_for_instance( [ 718.892743] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 718.892743] env[61998]: with excutils.save_and_reraise_exception(): [ 718.892743] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 718.892743] env[61998]: self.force_reraise() [ 718.892743] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 718.892743] env[61998]: raise self.value [ 718.892743] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 718.892743] env[61998]: updated_port = self._update_port( [ 718.892743] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 718.892743] env[61998]: _ensure_no_port_binding_failure(port) [ 718.892743] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 718.892743] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 718.893614] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 96270278-0740-4f05-9455-036a4f52f677, please check neutron logs for more information. 
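Every dump in this section bottoms out in the same check at nova/network/neutron.py line 294: neutron signals a failed binding on the port itself, and Nova converts that into PortBindingFailed. A hedged approximation of _ensure_no_port_binding_failure follows; the exception class here is a local stand-in for nova.exception.PortBindingFailed, and neutron marks the failure via the port's 'binding:vif_type' attribute:

    # Approximation of _ensure_no_port_binding_failure from the tracebacks
    # above. PortBindingFailed is a local stand-in for Nova's
    # nova.exception.PortBindingFailed.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check '
                             'neutron logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron reports a binding it could not complete by setting
        # 'binding:vif_type' to 'binding_failed' on the port.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])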
[ 718.893614] env[61998]: Removing descriptor: 17 [ 718.962655] env[61998]: DEBUG nova.scheduler.client.report [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 719.002286] env[61998]: DEBUG nova.network.neutron [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.012148] env[61998]: DEBUG nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 719.041356] env[61998]: DEBUG nova.virt.hardware [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 719.041712] env[61998]: DEBUG nova.virt.hardware [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 719.041812] env[61998]: DEBUG nova.virt.hardware [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.041933] env[61998]: DEBUG nova.virt.hardware [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Flavor pref 0:0:0 
{{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 719.042095] env[61998]: DEBUG nova.virt.hardware [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.042242] env[61998]: DEBUG nova.virt.hardware [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 719.042462] env[61998]: DEBUG nova.virt.hardware [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 719.042763] env[61998]: DEBUG nova.virt.hardware [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 719.042869] env[61998]: DEBUG nova.virt.hardware [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 719.042947] env[61998]: DEBUG nova.virt.hardware [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 719.043141] env[61998]: DEBUG nova.virt.hardware [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 719.044040] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42112196-7dfa-42ab-9783-12f041223da8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.052679] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d83cb82-2128-4145-aef8-7bf6aa0aaea8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.067420] env[61998]: ERROR nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 96270278-0740-4f05-9455-036a4f52f677, please check neutron logs for 
more information. [ 719.067420] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Traceback (most recent call last): [ 719.067420] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 719.067420] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] yield resources [ 719.067420] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 719.067420] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] self.driver.spawn(context, instance, image_meta, [ 719.067420] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 719.067420] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] self._vmops.spawn(context, instance, image_meta, injected_files, [ 719.067420] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 719.067420] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] vm_ref = self.build_virtual_machine(instance, [ 719.067420] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 719.068068] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] vif_infos = vmwarevif.get_vif_info(self._session, [ 719.068068] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 719.068068] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] for vif in network_info: [ 719.068068] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 719.068068] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] return self._sync_wrapper(fn, *args, **kwargs) [ 719.068068] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 719.068068] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] self.wait() [ 719.068068] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 719.068068] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] self[:] = self._gt.wait() [ 719.068068] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 719.068068] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] return self._exit_event.wait() [ 719.068068] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 719.068068] 
env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] current.throw(*self._exc) [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] result = function(*args, **kwargs) [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] return func(*args, **kwargs) [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] raise e [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] nwinfo = self.network_api.allocate_for_instance( [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] created_port_ids = self._update_ports_for_instance( [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] with excutils.save_and_reraise_exception(): [ 719.068872] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.069422] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] self.force_reraise() [ 719.069422] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.069422] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] raise self.value [ 719.069422] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.069422] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] updated_port = self._update_port( [ 719.069422] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.069422] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] _ensure_no_port_binding_failure(port) [ 719.069422] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 719.069422] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] raise exception.PortBindingFailed(port_id=port['id']) [ 719.069422] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] nova.exception.PortBindingFailed: Binding failed for port 96270278-0740-4f05-9455-036a4f52f677, please check neutron logs for more information. [ 719.069422] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] [ 719.069422] env[61998]: INFO nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Terminating instance [ 719.069819] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Acquiring lock "refresh_cache-62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.069819] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Acquired lock "refresh_cache-62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.070020] env[61998]: DEBUG nova.network.neutron [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 719.470959] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.487s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.472818] env[61998]: DEBUG nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 719.476368] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.880s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.504984] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Releasing lock "refresh_cache-9025d114-10da-4cf8-9e5f-2520bfd3b246" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.505587] env[61998]: DEBUG nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 719.505914] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6adf3447-adde-4ac5-bac7-eb517a263912 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.515442] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc8aa26-2bec-4839-acce-95a2fcc4f1de {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.540155] env[61998]: WARNING nova.virt.vmwareapi.driver [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 9025d114-10da-4cf8-9e5f-2520bfd3b246 could not be found. [ 719.540419] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 719.541018] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a79951db-8642-4275-a7e7-23ce0494e924 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.549851] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a34f1af-b312-4bca-aff4-575071acfa9f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.576154] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9025d114-10da-4cf8-9e5f-2520bfd3b246 could not be found. 
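The Acquiring/Acquired/Releasing triples around the "refresh_cache-<uuid>" and "compute_resources" locks in this section come from oslo.concurrency's named locks, which also report the held/waited timings (such as the 16.162s wait on compute_resources earlier). A minimal sketch of the two usage forms seen here, with placeholder bodies:

    # Minimal sketch of the oslo.concurrency lock usage logged above; the
    # lock names mirror the log, the function bodies are placeholders.
    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid):
        # Produces the Acquiring/Acquired/Releasing DEBUG lines seen for
        # "refresh_cache-<uuid>" above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance's network info cache here

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Serializes claims against the resource tracker; the "waited N s"
        # figure in the log is how long a caller queued behind this lock.
        pass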
[ 719.576270] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 719.576453] env[61998]: INFO nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Took 0.07 seconds to destroy the instance on the hypervisor. [ 719.579851] env[61998]: DEBUG oslo.service.loopingcall [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.579851] env[61998]: DEBUG nova.compute.manager [-] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 719.579851] env[61998]: DEBUG nova.network.neutron [-] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 719.595226] env[61998]: DEBUG nova.network.neutron [-] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.597053] env[61998]: DEBUG nova.network.neutron [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.728320] env[61998]: DEBUG nova.network.neutron [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.983022] env[61998]: DEBUG nova.compute.utils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 719.983022] env[61998]: DEBUG nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 719.983022] env[61998]: DEBUG nova.network.neutron [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 720.053996] env[61998]: DEBUG nova.policy [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3bcc50fb12d24d608e9daa6d25e0b639', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75fb3ec40747494bb0c66a95f7ba1025', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 720.100065] env[61998]: DEBUG nova.network.neutron [-] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.232358] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Releasing lock "refresh_cache-62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.232798] env[61998]: DEBUG nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 720.233008] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 720.233341] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd3b61cc-8cdf-4612-80cd-b801dd7c8780 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.245208] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796b9787-cc9f-41d3-8f26-92d2d0b128b8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.273149] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179 could not be found. 
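The nova.policy DEBUG line above is a soft check: network:attach_external_network is evaluated against the request credentials, fails for a plain member, and the build simply continues without an external network. A sketch of a non-raising check like it using oslo.policy directly; the enforcer wiring and the 'is_admin:True' check string are illustrative, while the rule name and credential fields are taken from the log:

    # Sketch of a non-raising policy check like the one logged above.
    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'is_admin:True'))

    creds = {'user_id': '3bcc50fb12d24d608e9daa6d25e0b639',
             'project_id': '75fb3ec40747494bb0c66a95f7ba1025',
             'roles': ['reader', 'member'], 'is_admin': False}

    # do_raise=False returns False instead of raising PolicyNotAuthorized,
    # which is why the failure above is only logged at DEBUG.
    allowed = enforcer.authorize('network:attach_external_network',
                                 target={}, creds=creds, do_raise=False)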
[ 720.273387] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 720.273563] env[61998]: INFO nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Took 0.04 seconds to destroy the instance on the hypervisor. [ 720.273801] env[61998]: DEBUG oslo.service.loopingcall [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 720.276481] env[61998]: DEBUG nova.compute.manager [-] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 720.276481] env[61998]: DEBUG nova.network.neutron [-] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 720.303427] env[61998]: DEBUG nova.network.neutron [-] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.398277] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed897a4-1bf7-44c1-8145-e5cd44050e73 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.402166] env[61998]: DEBUG nova.network.neutron [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Successfully created port: d0ded777-e6e9-4fe3-84d7-e2da98acad08 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 720.410089] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7803044a-8075-4977-ba52-4040b49bd999 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.448727] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680ad0c6-ee52-4f20-9fdd-b148134c3ed9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.455943] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b426d3a-4a0e-4234-96be-683e94bbbb1b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.468903] env[61998]: DEBUG nova.compute.provider_tree [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Inventory has not changed in ProviderTree for provider: 
c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.486654] env[61998]: DEBUG nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 720.513476] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Acquiring lock "55c6ecdc-0e84-4399-8f1b-307b1c69dcdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.513751] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Lock "55c6ecdc-0e84-4399-8f1b-307b1c69dcdf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.568673] env[61998]: DEBUG nova.compute.manager [req-7a4c7668-b17d-4edc-b491-014ec3f24e59 req-41460156-2ed3-4e88-babd-4fd8f3ab2626 service nova] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Received event network-changed-96270278-0740-4f05-9455-036a4f52f677 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 720.568907] env[61998]: DEBUG nova.compute.manager [req-7a4c7668-b17d-4edc-b491-014ec3f24e59 req-41460156-2ed3-4e88-babd-4fd8f3ab2626 service nova] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Refreshing instance network info cache due to event network-changed-96270278-0740-4f05-9455-036a4f52f677. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 720.569074] env[61998]: DEBUG oslo_concurrency.lockutils [req-7a4c7668-b17d-4edc-b491-014ec3f24e59 req-41460156-2ed3-4e88-babd-4fd8f3ab2626 service nova] Acquiring lock "refresh_cache-62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.569152] env[61998]: DEBUG oslo_concurrency.lockutils [req-7a4c7668-b17d-4edc-b491-014ec3f24e59 req-41460156-2ed3-4e88-babd-4fd8f3ab2626 service nova] Acquired lock "refresh_cache-62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.569307] env[61998]: DEBUG nova.network.neutron [req-7a4c7668-b17d-4edc-b491-014ec3f24e59 req-41460156-2ed3-4e88-babd-4fd8f3ab2626 service nova] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Refreshing network info cache for port 96270278-0740-4f05-9455-036a4f52f677 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 720.603019] env[61998]: INFO nova.compute.manager [-] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Took 1.03 seconds to deallocate network for instance. 
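The Acquiring/Acquired/Releasing lock lines around "refresh_cache-62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" come from oslo.concurrency's lockutils, which the compute manager uses to serialize network-info cache refreshes per instance. A minimal sketch of that pattern, assuming a hypothetical refresh helper:

    # Sketch: per-instance cache refresh serialized the way the log shows.
    from oslo_concurrency import lockutils

    instance_uuid = '62ebc97c-3f86-4ef2-b5c4-3d2fe563d179'

    def refresh_network_info_cache(uuid):    # hypothetical stand-in
        print('refreshing cache for %s' % uuid)

    # lockutils.lock() is a context manager; with external=False (the
    # default) it is an in-process semaphore, which is what produces the
    # "waited"/"held" timings in the lock lines above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        refresh_network_info_cache(instance_uuid)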
[ 720.806168] env[61998]: DEBUG nova.network.neutron [-] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.971592] env[61998]: DEBUG nova.scheduler.client.report [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 720.991052] env[61998]: INFO nova.virt.block_device [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Booting with volume 8bca6884-75a3-48e6-9e70-249702cc53eb at /dev/sda [ 721.042875] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33eda0c7-3cfb-4a6b-a968-2f464619eadc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.053743] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0500cfb-3ba3-4d5c-8617-e6915c2c154c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.078545] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-267186ec-4ae9-438a-9f01-8697ee93c2a3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.086178] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7649ad-0cd0-484b-9ddd-200b343bb3bb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.096964] env[61998]: DEBUG nova.network.neutron [req-7a4c7668-b17d-4edc-b491-014ec3f24e59 req-41460156-2ed3-4e88-babd-4fd8f3ab2626 service nova] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.112085] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1ac66b-2592-47f5-94c6-d4bb1268e8ca {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.118421] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5a7fba-12bd-48fe-911d-a8c564af25df {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.131673] env[61998]: DEBUG nova.virt.block_device [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Updating existing volume attachment record: 185707c8-85cb-4e6f-a6d2-edba57639f33 {{(pid=61998) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 721.184023] env[61998]: INFO nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Took 0.58 seconds to detach 1 volumes for instance. [ 721.186637] env[61998]: DEBUG nova.compute.claims [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 721.187526] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.197795] env[61998]: DEBUG nova.network.neutron [req-7a4c7668-b17d-4edc-b491-014ec3f24e59 req-41460156-2ed3-4e88-babd-4fd8f3ab2626 service nova] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.206956] env[61998]: DEBUG nova.compute.manager [req-7d2409ce-5e0d-4125-ae4b-564d3bb1b204 req-eccb41f8-03f8-4b17-84a5-b4241855771e service nova] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Received event network-changed-d0ded777-e6e9-4fe3-84d7-e2da98acad08 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 721.207223] env[61998]: DEBUG nova.compute.manager [req-7d2409ce-5e0d-4125-ae4b-564d3bb1b204 req-eccb41f8-03f8-4b17-84a5-b4241855771e service nova] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Refreshing instance network info cache due to event network-changed-d0ded777-e6e9-4fe3-84d7-e2da98acad08. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 721.207445] env[61998]: DEBUG oslo_concurrency.lockutils [req-7d2409ce-5e0d-4125-ae4b-564d3bb1b204 req-eccb41f8-03f8-4b17-84a5-b4241855771e service nova] Acquiring lock "refresh_cache-5f205b7d-d93e-436d-9d7d-04c6f767f7ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.207587] env[61998]: DEBUG oslo_concurrency.lockutils [req-7d2409ce-5e0d-4125-ae4b-564d3bb1b204 req-eccb41f8-03f8-4b17-84a5-b4241855771e service nova] Acquired lock "refresh_cache-5f205b7d-d93e-436d-9d7d-04c6f767f7ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.207743] env[61998]: DEBUG nova.network.neutron [req-7d2409ce-5e0d-4125-ae4b-564d3bb1b204 req-eccb41f8-03f8-4b17-84a5-b4241855771e service nova] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Refreshing network info cache for port d0ded777-e6e9-4fe3-84d7-e2da98acad08 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.309693] env[61998]: INFO nova.compute.manager [-] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Took 1.03 seconds to deallocate network for instance. [ 721.311965] env[61998]: DEBUG nova.compute.claims [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 721.312150] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.462887] env[61998]: ERROR nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d0ded777-e6e9-4fe3-84d7-e2da98acad08, please check neutron logs for more information. 
[ 721.462887] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 721.462887] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 721.462887] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 721.462887] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 721.462887] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 721.462887] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 721.462887] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 721.462887] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.462887] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 721.462887] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.462887] env[61998]: ERROR nova.compute.manager raise self.value [ 721.462887] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 721.462887] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 721.462887] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.462887] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 721.463621] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.463621] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 721.463621] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d0ded777-e6e9-4fe3-84d7-e2da98acad08, please check neutron logs for more information. 
[ 721.463621] env[61998]: ERROR nova.compute.manager [ 721.463621] env[61998]: Traceback (most recent call last): [ 721.463621] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 721.463621] env[61998]: listener.cb(fileno) [ 721.463621] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 721.463621] env[61998]: result = function(*args, **kwargs) [ 721.463621] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 721.463621] env[61998]: return func(*args, **kwargs) [ 721.463621] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 721.463621] env[61998]: raise e [ 721.463621] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 721.463621] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 721.463621] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 721.463621] env[61998]: created_port_ids = self._update_ports_for_instance( [ 721.463621] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 721.463621] env[61998]: with excutils.save_and_reraise_exception(): [ 721.463621] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.463621] env[61998]: self.force_reraise() [ 721.463621] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.463621] env[61998]: raise self.value [ 721.463621] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 721.463621] env[61998]: updated_port = self._update_port( [ 721.463621] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.463621] env[61998]: _ensure_no_port_binding_failure(port) [ 721.463621] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.463621] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 721.464624] env[61998]: nova.exception.PortBindingFailed: Binding failed for port d0ded777-e6e9-4fe3-84d7-e2da98acad08, please check neutron logs for more information. [ 721.464624] env[61998]: Removing descriptor: 15 [ 721.478708] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.478708] env[61998]: ERROR nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7aacda59-a6a6-4e46-a147-217efa725665, please check neutron logs for more information. 
[ 721.478708] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Traceback (most recent call last): [ 721.478708] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 721.478708] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] self.driver.spawn(context, instance, image_meta, [ 721.478708] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 721.478708] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 721.478708] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 721.478708] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] vm_ref = self.build_virtual_machine(instance, [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] vif_infos = vmwarevif.get_vif_info(self._session, [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] for vif in network_info: [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] return self._sync_wrapper(fn, *args, **kwargs) [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] self.wait() [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] self[:] = self._gt.wait() [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] return self._exit_event.wait() [ 721.479185] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] current.throw(*self._exc) [ 721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] result = function(*args, **kwargs) [ 721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] return func(*args, **kwargs) [ 721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] raise e [ 721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] nwinfo = self.network_api.allocate_for_instance( [ 721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] created_port_ids = self._update_ports_for_instance( [ 721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 721.479554] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] with excutils.save_and_reraise_exception(): [ 721.480160] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.480160] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] self.force_reraise() [ 721.480160] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.480160] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] raise self.value [ 721.480160] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 721.480160] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] updated_port = self._update_port( [ 721.480160] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.480160] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] _ensure_no_port_binding_failure(port) [ 721.480160] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.480160] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] raise exception.PortBindingFailed(port_id=port['id']) [ 721.480160] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] nova.exception.PortBindingFailed: Binding failed for 
port 7aacda59-a6a6-4e46-a147-217efa725665, please check neutron logs for more information. [ 721.480160] env[61998]: ERROR nova.compute.manager [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] [ 721.480524] env[61998]: DEBUG nova.compute.utils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Binding failed for port 7aacda59-a6a6-4e46-a147-217efa725665, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 721.480524] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.225s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.484453] env[61998]: DEBUG nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Build of instance 9da95edb-f9fb-40f3-9317-d27f1bae0ecf was re-scheduled: Binding failed for port 7aacda59-a6a6-4e46-a147-217efa725665, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 721.484453] env[61998]: DEBUG nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 721.484453] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquiring lock "refresh_cache-9da95edb-f9fb-40f3-9317-d27f1bae0ecf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.484453] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Acquired lock "refresh_cache-9da95edb-f9fb-40f3-9317-d27f1bae0ecf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.484670] env[61998]: DEBUG nova.network.neutron [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 721.702311] env[61998]: DEBUG oslo_concurrency.lockutils [req-7a4c7668-b17d-4edc-b491-014ec3f24e59 req-41460156-2ed3-4e88-babd-4fd8f3ab2626 service nova] Releasing lock "refresh_cache-62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.702600] env[61998]: DEBUG nova.compute.manager [req-7a4c7668-b17d-4edc-b491-014ec3f24e59 req-41460156-2ed3-4e88-babd-4fd8f3ab2626 service 
nova] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Received event network-vif-deleted-96270278-0740-4f05-9455-036a4f52f677 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 721.729676] env[61998]: DEBUG nova.network.neutron [req-7d2409ce-5e0d-4125-ae4b-564d3bb1b204 req-eccb41f8-03f8-4b17-84a5-b4241855771e service nova] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.808190] env[61998]: DEBUG nova.network.neutron [req-7d2409ce-5e0d-4125-ae4b-564d3bb1b204 req-eccb41f8-03f8-4b17-84a5-b4241855771e service nova] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.005411] env[61998]: DEBUG nova.network.neutron [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.106989] env[61998]: DEBUG nova.network.neutron [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.291044] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a1f64d-9f11-4014-bd07-ddae98fdb88a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.298628] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9be4e53-e39d-4910-8256-62db65af60e2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.327618] env[61998]: DEBUG oslo_concurrency.lockutils [req-7d2409ce-5e0d-4125-ae4b-564d3bb1b204 req-eccb41f8-03f8-4b17-84a5-b4241855771e service nova] Releasing lock "refresh_cache-5f205b7d-d93e-436d-9d7d-04c6f767f7ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.328543] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103cd499-d3ee-4e7e-8221-6459cc1367ea {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.335260] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6439b1c9-7f1e-44d1-8e86-1851efebd313 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.347951] env[61998]: DEBUG nova.compute.provider_tree [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.609871] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 
tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Releasing lock "refresh_cache-9da95edb-f9fb-40f3-9317-d27f1bae0ecf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.610234] env[61998]: DEBUG nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 722.610433] env[61998]: DEBUG nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 722.610810] env[61998]: DEBUG nova.network.neutron [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 722.630306] env[61998]: DEBUG nova.network.neutron [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.850891] env[61998]: DEBUG nova.scheduler.client.report [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 723.134024] env[61998]: DEBUG nova.network.neutron [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.233027] env[61998]: DEBUG nova.compute.manager [req-79c27366-0dc2-41bd-b28e-03d026fee895 req-eaef3a19-70f1-4b59-a562-1dd649de7c6d service nova] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Received event network-vif-deleted-d0ded777-e6e9-4fe3-84d7-e2da98acad08 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 723.237463] env[61998]: DEBUG nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Start spawning the instance on the 
hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 723.238032] env[61998]: DEBUG nova.virt.hardware [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=0,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=1073741824,status='active',tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 723.238187] env[61998]: DEBUG nova.virt.hardware [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 723.238366] env[61998]: DEBUG nova.virt.hardware [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 723.238578] env[61998]: DEBUG nova.virt.hardware [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 723.238729] env[61998]: DEBUG nova.virt.hardware [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 723.238872] env[61998]: DEBUG nova.virt.hardware [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 723.239082] env[61998]: DEBUG nova.virt.hardware [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 723.239240] env[61998]: DEBUG nova.virt.hardware [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 723.239401] env[61998]: DEBUG nova.virt.hardware [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Got 1 
possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 723.239556] env[61998]: DEBUG nova.virt.hardware [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 723.239854] env[61998]: DEBUG nova.virt.hardware [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 723.241128] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff073d52-aaa4-47b9-8a29-f1f6f033b305 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.250498] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e522742-f2fd-43c4-8aa5-c84a1441028d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.264352] env[61998]: ERROR nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d0ded777-e6e9-4fe3-84d7-e2da98acad08, please check neutron logs for more information. 
[ 723.264352] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Traceback (most recent call last): [ 723.264352] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 723.264352] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] yield resources [ 723.264352] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 723.264352] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] self.driver.spawn(context, instance, image_meta, [ 723.264352] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 723.264352] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] self._vmops.spawn(context, instance, image_meta, injected_files, [ 723.264352] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 723.264352] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] vm_ref = self.build_virtual_machine(instance, [ 723.264352] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 723.264783] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] vif_infos = vmwarevif.get_vif_info(self._session, [ 723.264783] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 723.264783] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] for vif in network_info: [ 723.264783] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 723.264783] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] return self._sync_wrapper(fn, *args, **kwargs) [ 723.264783] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 723.264783] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] self.wait() [ 723.264783] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 723.264783] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] self[:] = self._gt.wait() [ 723.264783] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 723.264783] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] return self._exit_event.wait() [ 723.264783] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 723.264783] env[61998]: ERROR 
nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] current.throw(*self._exc) [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] result = function(*args, **kwargs) [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] return func(*args, **kwargs) [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] raise e [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] nwinfo = self.network_api.allocate_for_instance( [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] created_port_ids = self._update_ports_for_instance( [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] with excutils.save_and_reraise_exception(): [ 723.265277] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 723.265748] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] self.force_reraise() [ 723.265748] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 723.265748] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] raise self.value [ 723.265748] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 723.265748] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] updated_port = self._update_port( [ 723.265748] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 723.265748] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] _ensure_no_port_binding_failure(port) [ 723.265748] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
723.265748] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] raise exception.PortBindingFailed(port_id=port['id']) [ 723.265748] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] nova.exception.PortBindingFailed: Binding failed for port d0ded777-e6e9-4fe3-84d7-e2da98acad08, please check neutron logs for more information. [ 723.265748] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] [ 723.265748] env[61998]: INFO nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Terminating instance [ 723.266784] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Acquiring lock "refresh_cache-5f205b7d-d93e-436d-9d7d-04c6f767f7ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.266953] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Acquired lock "refresh_cache-5f205b7d-d93e-436d-9d7d-04c6f767f7ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.267763] env[61998]: DEBUG nova.network.neutron [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 723.355630] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.876s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.356259] env[61998]: ERROR nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0878c0d8-e560-4f55-bbea-86c59a754ba1, please check neutron logs for more information. 
[ 723.356259] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Traceback (most recent call last):
[ 723.356259] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 723.356259] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] self.driver.spawn(context, instance, image_meta,
[ 723.356259] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 723.356259] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 723.356259] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 723.356259] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] vm_ref = self.build_virtual_machine(instance,
[ 723.356259] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 723.356259] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] vif_infos = vmwarevif.get_vif_info(self._session,
[ 723.356259] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] for vif in network_info:
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] return self._sync_wrapper(fn, *args, **kwargs)
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] self.wait()
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] self[:] = self._gt.wait()
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] return self._exit_event.wait()
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] result = hub.switch()
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 723.356664] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] return self.greenlet.switch()
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] result = function(*args, **kwargs)
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] return func(*args, **kwargs)
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] raise e
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] nwinfo = self.network_api.allocate_for_instance(
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] created_port_ids = self._update_ports_for_instance(
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] with excutils.save_and_reraise_exception():
[ 723.357163] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 723.357594] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] self.force_reraise()
[ 723.357594] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 723.357594] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] raise self.value
[ 723.357594] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 723.357594] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] updated_port = self._update_port(
[ 723.357594] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 723.357594] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] _ensure_no_port_binding_failure(port)
[ 723.357594] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 723.357594] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] raise exception.PortBindingFailed(port_id=port['id'])
[ 723.357594] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] nova.exception.PortBindingFailed: Binding failed for port 0878c0d8-e560-4f55-bbea-86c59a754ba1, please check neutron logs for more information.
[ 723.357594] env[61998]: ERROR nova.compute.manager [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212]
[ 723.357963] env[61998]: DEBUG nova.compute.utils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Binding failed for port 0878c0d8-e560-4f55-bbea-86c59a754ba1, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 723.358363] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.394s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 723.359899] env[61998]: INFO nova.compute.claims [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 723.362518] env[61998]: DEBUG nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Build of instance a8101e8d-55d0-4f70-9119-f5e176ba8212 was re-scheduled: Binding failed for port 0878c0d8-e560-4f55-bbea-86c59a754ba1, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}}
[ 723.362922] env[61998]: DEBUG nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}}
[ 723.363152] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Acquiring lock "refresh_cache-a8101e8d-55d0-4f70-9119-f5e176ba8212" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 723.363296] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Acquired lock "refresh_cache-a8101e8d-55d0-4f70-9119-f5e176ba8212" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 723.363451] env[61998]: DEBUG nova.network.neutron [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 723.636748] env[61998]: INFO nova.compute.manager [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] [instance: 9da95edb-f9fb-40f3-9317-d27f1bae0ecf] Took 1.03 seconds to deallocate network for instance.
[ 723.785416] env[61998]: DEBUG nova.network.neutron [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 723.864337] env[61998]: DEBUG nova.network.neutron [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 723.881764] env[61998]: DEBUG nova.network.neutron [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 723.970971] env[61998]: DEBUG nova.network.neutron [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 724.368495] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Releasing lock "refresh_cache-5f205b7d-d93e-436d-9d7d-04c6f767f7ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 724.369085] env[61998]: DEBUG nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}}
[ 724.369408] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-35967501-d80e-483c-b687-c43ef83c6b15 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 724.381172] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152c931d-ac7d-43fc-80b4-9ba6f27d9acd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 724.405698] env[61998]: WARNING nova.virt.vmwareapi.driver [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 5f205b7d-d93e-436d-9d7d-04c6f767f7ad could not be found.
[ 724.405905] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 724.408055] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b391301-03c7-42e9-b91e-ffb169053ccb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 724.415580] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df632f82-cbf0-4700-bfa4-328084ab7dc6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 724.438305] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5f205b7d-d93e-436d-9d7d-04c6f767f7ad could not be found.
[ 724.438534] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 724.438710] env[61998]: INFO nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Took 0.07 seconds to destroy the instance on the hypervisor.
[ 724.438951] env[61998]: DEBUG oslo.service.loopingcall [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 724.441273] env[61998]: DEBUG nova.compute.manager [-] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 724.441273] env[61998]: DEBUG nova.network.neutron [-] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 724.457154] env[61998]: DEBUG nova.network.neutron [-] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 724.473427] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Releasing lock "refresh_cache-a8101e8d-55d0-4f70-9119-f5e176ba8212" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 724.473646] env[61998]: DEBUG nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}}
[ 724.473820] env[61998]: DEBUG nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 724.473979] env[61998]: DEBUG nova.network.neutron [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 724.495880] env[61998]: DEBUG nova.network.neutron [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 724.669304] env[61998]: INFO nova.scheduler.client.report [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Deleted allocations for instance 9da95edb-f9fb-40f3-9317-d27f1bae0ecf
[ 724.717017] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40086597-ecc4-4a53-9978-cd9b758af7fe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 724.724571] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973d2bd4-6ba7-4a9b-92af-22ae79603f8b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 724.754539] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0be1a17-65f0-45d7-9b0e-0eccd32bc15d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 724.762220] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06403baf-520d-4fcd-a679-1429c66432c4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 724.776555] env[61998]: DEBUG nova.compute.provider_tree [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 724.959884] env[61998]: DEBUG nova.network.neutron [-] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 724.998601] env[61998]: DEBUG nova.network.neutron [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 725.177471] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9251ac00-c949-49ae-b261-cb82a79c6f51 tempest-SecurityGroupsTestJSON-984357098 tempest-SecurityGroupsTestJSON-984357098-project-member] Lock "9da95edb-f9fb-40f3-9317-d27f1bae0ecf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 147.548s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 725.279292] env[61998]: DEBUG nova.scheduler.client.report [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 725.461691] env[61998]: INFO nova.compute.manager [-] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Took 1.02 seconds to deallocate network for instance.
[ 725.501661] env[61998]: INFO nova.compute.manager [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] [instance: a8101e8d-55d0-4f70-9119-f5e176ba8212] Took 1.03 seconds to deallocate network for instance.
[ 725.681022] env[61998]: DEBUG nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}}
[ 725.783847] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 725.784421] env[61998]: DEBUG nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}}
[ 725.787671] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.598s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 725.789245] env[61998]: INFO nova.compute.claims [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 726.014531] env[61998]: INFO nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Took 0.55 seconds to detach 1 volumes for instance.
[ 726.016647] env[61998]: DEBUG nova.compute.claims [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 726.016818] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 726.204021] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 726.293890] env[61998]: DEBUG nova.compute.utils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 726.297408] env[61998]: DEBUG nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 726.297576] env[61998]: DEBUG nova.network.neutron [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 726.351385] env[61998]: DEBUG nova.policy [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'daa8327bd6424f48801bd9479aa1a9d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08e97324fd454c64baa0cde923a27612', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}}
[ 726.532965] env[61998]: INFO nova.scheduler.client.report [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Deleted allocations for instance a8101e8d-55d0-4f70-9119-f5e176ba8212
[ 726.762047] env[61998]: DEBUG nova.network.neutron [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Successfully created port: 88a38d4c-6e6e-44b6-bd38-68673f87c6a7 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 726.800963] env[61998]: DEBUG nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}}
[ 727.042602] env[61998]: DEBUG oslo_concurrency.lockutils [None req-13688a74-cfc3-49b7-bdd0-ee2778e08d49 tempest-ServerAddressesTestJSON-1830239892 tempest-ServerAddressesTestJSON-1830239892-project-member] Lock "a8101e8d-55d0-4f70-9119-f5e176ba8212" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 145.988s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 727.152759] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e8cdd9-d60c-4829-b7ad-5cde678e4cbc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 727.160983] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adf7752-24c3-4330-a9f1-2e51d58bf4de {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 727.192262] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b190544-f3fd-4851-a6af-76e79a7baf1d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 727.200680] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f37d511-803e-494e-8719-ca9b22e32660 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 727.214310] env[61998]: DEBUG nova.compute.provider_tree [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 727.549402] env[61998]: DEBUG nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}}
[ 727.720846] env[61998]: DEBUG nova.scheduler.client.report [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 727.813112] env[61998]: DEBUG nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}}
[ 727.851820] env[61998]: DEBUG nova.virt.hardware [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 727.851820] env[61998]: DEBUG nova.virt.hardware [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 727.851820] env[61998]: DEBUG nova.virt.hardware [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 727.852065] env[61998]: DEBUG nova.virt.hardware [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 727.852065] env[61998]: DEBUG nova.virt.hardware [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 727.852545] env[61998]: DEBUG nova.virt.hardware [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 727.852894] env[61998]: DEBUG nova.virt.hardware [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 727.853191] env[61998]: DEBUG nova.virt.hardware [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 727.856017] env[61998]: DEBUG nova.virt.hardware [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 727.856017] env[61998]: DEBUG nova.virt.hardware [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 727.856017] env[61998]: DEBUG nova.virt.hardware [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 727.856017] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c728436-dd26-4309-a45c-f514a68bcb71 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 727.866479] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09265e52-34d7-47c6-a4bc-505471a30e79 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 728.072938] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 728.194384] env[61998]: DEBUG nova.compute.manager [req-cd527f6b-7f28-4749-813f-77d63c92045e req-201ce28e-3483-4b56-aa82-9c8a070a2b20 service nova] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Received event network-changed-88a38d4c-6e6e-44b6-bd38-68673f87c6a7 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 728.194591] env[61998]: DEBUG nova.compute.manager [req-cd527f6b-7f28-4749-813f-77d63c92045e req-201ce28e-3483-4b56-aa82-9c8a070a2b20 service nova] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Refreshing instance network info cache due to event network-changed-88a38d4c-6e6e-44b6-bd38-68673f87c6a7. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}}
[ 728.194801] env[61998]: DEBUG oslo_concurrency.lockutils [req-cd527f6b-7f28-4749-813f-77d63c92045e req-201ce28e-3483-4b56-aa82-9c8a070a2b20 service nova] Acquiring lock "refresh_cache-dce49aac-03f3-48ed-9bad-c5eb2d779bae" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 728.194939] env[61998]: DEBUG oslo_concurrency.lockutils [req-cd527f6b-7f28-4749-813f-77d63c92045e req-201ce28e-3483-4b56-aa82-9c8a070a2b20 service nova] Acquired lock "refresh_cache-dce49aac-03f3-48ed-9bad-c5eb2d779bae" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 728.195599] env[61998]: DEBUG nova.network.neutron [req-cd527f6b-7f28-4749-813f-77d63c92045e req-201ce28e-3483-4b56-aa82-9c8a070a2b20 service nova] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Refreshing network info cache for port 88a38d4c-6e6e-44b6-bd38-68673f87c6a7 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 728.226533] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.439s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 728.227250] env[61998]: DEBUG nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}}
[ 728.229909] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.563s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 728.506292] env[61998]: ERROR nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 88a38d4c-6e6e-44b6-bd38-68673f87c6a7, please check neutron logs for more information.
[ 728.506292] env[61998]: ERROR nova.compute.manager Traceback (most recent call last):
[ 728.506292] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 728.506292] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 728.506292] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 728.506292] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 728.506292] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 728.506292] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 728.506292] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 728.506292] env[61998]: ERROR nova.compute.manager self.force_reraise()
[ 728.506292] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 728.506292] env[61998]: ERROR nova.compute.manager raise self.value
[ 728.506292] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 728.506292] env[61998]: ERROR nova.compute.manager updated_port = self._update_port(
[ 728.506292] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 728.506292] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 728.506838] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 728.506838] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 728.506838] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 88a38d4c-6e6e-44b6-bd38-68673f87c6a7, please check neutron logs for more information.
[ 728.506838] env[61998]: ERROR nova.compute.manager
[ 728.506838] env[61998]: Traceback (most recent call last):
[ 728.506838] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 728.506838] env[61998]: listener.cb(fileno)
[ 728.506838] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 728.506838] env[61998]: result = function(*args, **kwargs)
[ 728.506838] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 728.506838] env[61998]: return func(*args, **kwargs)
[ 728.506838] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 728.506838] env[61998]: raise e
[ 728.506838] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 728.506838] env[61998]: nwinfo = self.network_api.allocate_for_instance(
[ 728.506838] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 728.506838] env[61998]: created_port_ids = self._update_ports_for_instance(
[ 728.506838] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 728.506838] env[61998]: with excutils.save_and_reraise_exception():
[ 728.506838] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 728.506838] env[61998]: self.force_reraise()
[ 728.506838] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 728.506838] env[61998]: raise self.value
[ 728.506838] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 728.506838] env[61998]: updated_port = self._update_port(
[ 728.506838] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 728.506838] env[61998]: _ensure_no_port_binding_failure(port)
[ 728.506838] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 728.506838] env[61998]: raise exception.PortBindingFailed(port_id=port['id'])
[ 728.507759] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 88a38d4c-6e6e-44b6-bd38-68673f87c6a7, please check neutron logs for more information.
[ 728.507759] env[61998]: Removing descriptor: 15
[ 728.507759] env[61998]: ERROR nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 88a38d4c-6e6e-44b6-bd38-68673f87c6a7, please check neutron logs for more information.
[ 728.507759] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Traceback (most recent call last):
[ 728.507759] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources
[ 728.507759] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] yield resources
[ 728.507759] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 728.507759] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] self.driver.spawn(context, instance, image_meta,
[ 728.507759] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 728.507759] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 728.507759] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 728.507759] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] vm_ref = self.build_virtual_machine(instance,
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] vif_infos = vmwarevif.get_vif_info(self._session,
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] for vif in network_info:
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] return self._sync_wrapper(fn, *args, **kwargs)
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] self.wait()
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] self[:] = self._gt.wait()
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] return self._exit_event.wait()
[ 728.508194] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] result = hub.switch()
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] return self.greenlet.switch()
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] result = function(*args, **kwargs)
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] return func(*args, **kwargs)
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] raise e
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] nwinfo = self.network_api.allocate_for_instance(
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 728.508615] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] created_port_ids = self._update_ports_for_instance(
[ 728.509178] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 728.509178] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] with excutils.save_and_reraise_exception():
[ 728.509178] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 728.509178] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] self.force_reraise()
[ 728.509178] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 728.509178] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] raise self.value
[ 728.509178] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 728.509178] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] updated_port = self._update_port(
[ 728.509178] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 728.509178] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] _ensure_no_port_binding_failure(port)
[ 728.509178] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 728.509178] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] raise exception.PortBindingFailed(port_id=port['id'])
[ 728.509591] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] nova.exception.PortBindingFailed: Binding failed for port 88a38d4c-6e6e-44b6-bd38-68673f87c6a7, please check neutron logs for more information.
[ 728.509591] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae]
[ 728.509591] env[61998]: INFO nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Terminating instance
[ 728.512293] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Acquiring lock "refresh_cache-dce49aac-03f3-48ed-9bad-c5eb2d779bae" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 728.735033] env[61998]: DEBUG nova.compute.utils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 728.739990] env[61998]: DEBUG nova.network.neutron [req-cd527f6b-7f28-4749-813f-77d63c92045e req-201ce28e-3483-4b56-aa82-9c8a070a2b20 service nova] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 728.741773] env[61998]: DEBUG nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 728.741969] env[61998]: DEBUG nova.network.neutron [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 728.799335] env[61998]: DEBUG nova.policy [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f249ad827b244a8891233183bed4a38', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e50edf6017d84278bf6680fdd73a9639', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}}
[ 728.948439] env[61998]: DEBUG nova.network.neutron [req-cd527f6b-7f28-4749-813f-77d63c92045e req-201ce28e-3483-4b56-aa82-9c8a070a2b20 service nova] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 729.155093] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0903aa-81e0-403a-9f6d-c8639bc78f13 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 729.167358] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e307ace0-7a1f-4dfd-9124-83063bddbf80 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 729.171634] env[61998]: DEBUG nova.network.neutron [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Successfully created port: 2e590a46-aa26-4965-bc67-bd8f267fdd6e {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 729.204042] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af3c95c-4e3f-4945-87c4-77192f43bb78 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 729.215426] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9270b1-60a9-425c-bd6f-628cd5457808 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 729.231627] env[61998]: DEBUG nova.compute.provider_tree [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 729.240981] env[61998]: DEBUG nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}}
[ 729.451367] env[61998]: DEBUG oslo_concurrency.lockutils [req-cd527f6b-7f28-4749-813f-77d63c92045e req-201ce28e-3483-4b56-aa82-9c8a070a2b20 service nova] Releasing lock "refresh_cache-dce49aac-03f3-48ed-9bad-c5eb2d779bae" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 729.451916] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Acquired lock "refresh_cache-dce49aac-03f3-48ed-9bad-c5eb2d779bae" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 729.451988] env[61998]: DEBUG nova.network.neutron [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 729.736093] env[61998]: DEBUG nova.scheduler.client.report [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 729.981329] env[61998]: DEBUG nova.network.neutron [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 730.093809] env[61998]: DEBUG nova.network.neutron [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 730.189503] env[61998]: DEBUG nova.compute.manager [req-878844e7-7eed-4734-8617-87b8243a77c9 req-d71e97c3-f2b7-4136-811b-b119da3a62f7 service nova] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Received event network-changed-2e590a46-aa26-4965-bc67-bd8f267fdd6e {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 730.189753] env[61998]: DEBUG nova.compute.manager [req-878844e7-7eed-4734-8617-87b8243a77c9 req-d71e97c3-f2b7-4136-811b-b119da3a62f7 service nova] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Refreshing instance network info cache due to event network-changed-2e590a46-aa26-4965-bc67-bd8f267fdd6e. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}}
[ 730.189986] env[61998]: DEBUG oslo_concurrency.lockutils [req-878844e7-7eed-4734-8617-87b8243a77c9 req-d71e97c3-f2b7-4136-811b-b119da3a62f7 service nova] Acquiring lock "refresh_cache-87f859c6-7a96-4a48-adb8-814a134ad4c8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 730.190107] env[61998]: DEBUG oslo_concurrency.lockutils [req-878844e7-7eed-4734-8617-87b8243a77c9 req-d71e97c3-f2b7-4136-811b-b119da3a62f7 service nova] Acquired lock "refresh_cache-87f859c6-7a96-4a48-adb8-814a134ad4c8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 730.190217] env[61998]: DEBUG nova.network.neutron [req-878844e7-7eed-4734-8617-87b8243a77c9 req-d71e97c3-f2b7-4136-811b-b119da3a62f7 service nova] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Refreshing network info cache for port 2e590a46-aa26-4965-bc67-bd8f267fdd6e {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 730.224648] env[61998]: DEBUG nova.compute.manager [req-6b107a1b-a3d3-478a-9dd6-9b840ee5dece req-62fa2647-a283-4be1-b056-b17def66db32 service nova] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Received event network-vif-deleted-88a38d4c-6e6e-44b6-bd38-68673f87c6a7 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 730.239692] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.010s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 730.240328] env[61998]: ERROR nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b9ef6b1d-c010-4647-9039-78c7f742f0a7, please check neutron logs for more information.
[ 730.240328] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Traceback (most recent call last): [ 730.240328] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 730.240328] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] self.driver.spawn(context, instance, image_meta, [ 730.240328] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 730.240328] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 730.240328] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 730.240328] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] vm_ref = self.build_virtual_machine(instance, [ 730.240328] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 730.240328] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] vif_infos = vmwarevif.get_vif_info(self._session, [ 730.240328] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] for vif in network_info: [ 730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] return self._sync_wrapper(fn, *args, **kwargs) [ 730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] self.wait() [ 730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] self[:] = self._gt.wait() [ 730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] return self._exit_event.wait() [ 730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] current.throw(*self._exc) [ 730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
730.240817] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] result = function(*args, **kwargs) [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] return func(*args, **kwargs) [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] raise e [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] nwinfo = self.network_api.allocate_for_instance( [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] created_port_ids = self._update_ports_for_instance( [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] with excutils.save_and_reraise_exception(): [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] self.force_reraise() [ 730.241263] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 730.241702] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] raise self.value [ 730.241702] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 730.241702] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] updated_port = self._update_port( [ 730.241702] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 730.241702] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] _ensure_no_port_binding_failure(port) [ 730.241702] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 730.241702] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] raise exception.PortBindingFailed(port_id=port['id']) [ 730.241702] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] nova.exception.PortBindingFailed: Binding failed for 
port b9ef6b1d-c010-4647-9039-78c7f742f0a7, please check neutron logs for more information. [ 730.241702] env[61998]: ERROR nova.compute.manager [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] [ 730.241702] env[61998]: DEBUG nova.compute.utils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Binding failed for port b9ef6b1d-c010-4647-9039-78c7f742f0a7, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 730.242699] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.718s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.246102] env[61998]: DEBUG nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Build of instance 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e was re-scheduled: Binding failed for port b9ef6b1d-c010-4647-9039-78c7f742f0a7, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 730.246520] env[61998]: DEBUG nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 730.246725] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Acquiring lock "refresh_cache-4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.246862] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Acquired lock "refresh_cache-4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.247019] env[61998]: DEBUG nova.network.neutron [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 730.252265] env[61998]: DEBUG nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 730.282899] env[61998]: DEBUG nova.virt.hardware [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 730.283523] env[61998]: DEBUG nova.virt.hardware [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 730.283523] env[61998]: DEBUG nova.virt.hardware [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 730.283770] env[61998]: DEBUG nova.virt.hardware [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 730.283770] env[61998]: DEBUG nova.virt.hardware [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 730.284018] env[61998]: DEBUG nova.virt.hardware [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 730.289818] env[61998]: DEBUG nova.virt.hardware [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 730.289818] env[61998]: DEBUG nova.virt.hardware [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 730.289818] env[61998]: DEBUG nova.virt.hardware [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 730.289818] env[61998]: DEBUG nova.virt.hardware [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 730.289818] env[61998]: DEBUG nova.virt.hardware [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 730.289984] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a395de92-8424-4c05-9b8a-4c9b8ab2c0af {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.299120] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74d37f1-7f66-4e89-a904-b60ec6485aba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.466642] env[61998]: ERROR nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2e590a46-aa26-4965-bc67-bd8f267fdd6e, please check neutron logs for more information. 
[ 730.466642] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 730.466642] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 730.466642] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 730.466642] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 730.466642] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 730.466642] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 730.466642] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 730.466642] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 730.466642] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 730.466642] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 730.466642] env[61998]: ERROR nova.compute.manager raise self.value [ 730.466642] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 730.466642] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 730.466642] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 730.466642] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 730.467287] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 730.467287] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 730.467287] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2e590a46-aa26-4965-bc67-bd8f267fdd6e, please check neutron logs for more information. 
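Note: every PortBindingFailed traceback in this log bottoms out at the same check, nova.network.neutron._ensure_no_port_binding_failure (the raise at neutron.py:294 above). A minimal standalone sketch of that check, assuming Neutron's convention of reporting a failed binding through the port's 'binding:vif_type' attribute; the constant and the stand-in exception class below are illustrative, not Nova's exact code:

    # Stand-in for nova.exception.PortBindingFailed; the message mirrors the log text.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    # Assumed constant, mirroring nova.network.model.VIF_TYPE_BINDING_FAILED.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def _ensure_no_port_binding_failure(port):
        # Neutron reports a port it could not bind by setting binding:vif_type
        # to 'binding_failed'; surface that as an exception instead of trying
        # to spawn a VM with an unusable VIF.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # The port from the failure above:
    port = {'id': '2e590a46-aa26-4965-bc67-bd8f267fdd6e',
            'binding:vif_type': 'binding_failed'}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)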
[ 730.467287] env[61998]: ERROR nova.compute.manager [ 730.467287] env[61998]: Traceback (most recent call last): [ 730.467287] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 730.467287] env[61998]: listener.cb(fileno) [ 730.467287] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 730.467287] env[61998]: result = function(*args, **kwargs) [ 730.467287] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 730.467287] env[61998]: return func(*args, **kwargs) [ 730.467287] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 730.467287] env[61998]: raise e [ 730.467287] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 730.467287] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 730.467287] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 730.467287] env[61998]: created_port_ids = self._update_ports_for_instance( [ 730.467287] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 730.467287] env[61998]: with excutils.save_and_reraise_exception(): [ 730.467287] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 730.467287] env[61998]: self.force_reraise() [ 730.467287] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 730.467287] env[61998]: raise self.value [ 730.467287] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 730.467287] env[61998]: updated_port = self._update_port( [ 730.467287] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 730.467287] env[61998]: _ensure_no_port_binding_failure(port) [ 730.467287] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 730.467287] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 730.468384] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 2e590a46-aa26-4965-bc67-bd8f267fdd6e, please check neutron logs for more information. [ 730.468384] env[61998]: Removing descriptor: 17 [ 730.468384] env[61998]: ERROR nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2e590a46-aa26-4965-bc67-bd8f267fdd6e, please check neutron logs for more information. 
[ 730.468384] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Traceback (most recent call last): [ 730.468384] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 730.468384] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] yield resources [ 730.468384] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 730.468384] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] self.driver.spawn(context, instance, image_meta, [ 730.468384] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 730.468384] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 730.468384] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 730.468384] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] vm_ref = self.build_virtual_machine(instance, [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] vif_infos = vmwarevif.get_vif_info(self._session, [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] for vif in network_info: [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] return self._sync_wrapper(fn, *args, **kwargs) [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] self.wait() [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] self[:] = self._gt.wait() [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] return self._exit_event.wait() [ 730.468799] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 730.469377] env[61998]: ERROR 
nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] result = hub.switch() [ 730.469377] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 730.469377] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] return self.greenlet.switch() [ 730.469377] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 730.469377] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] result = function(*args, **kwargs) [ 730.469377] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 730.469377] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] return func(*args, **kwargs) [ 730.469377] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 730.469377] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] raise e [ 730.469377] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 730.469377] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] nwinfo = self.network_api.allocate_for_instance( [ 730.469377] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 730.469377] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] created_port_ids = self._update_ports_for_instance( [ 730.469951] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 730.469951] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] with excutils.save_and_reraise_exception(): [ 730.469951] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 730.469951] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] self.force_reraise() [ 730.469951] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 730.469951] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] raise self.value [ 730.469951] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 730.469951] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] updated_port = self._update_port( [ 730.469951] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 730.469951] 
env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] _ensure_no_port_binding_failure(port) [ 730.469951] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 730.469951] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] raise exception.PortBindingFailed(port_id=port['id']) [ 730.470527] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] nova.exception.PortBindingFailed: Binding failed for port 2e590a46-aa26-4965-bc67-bd8f267fdd6e, please check neutron logs for more information. [ 730.470527] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] [ 730.470527] env[61998]: INFO nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Terminating instance [ 730.470921] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Acquiring lock "refresh_cache-87f859c6-7a96-4a48-adb8-814a134ad4c8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.600727] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Releasing lock "refresh_cache-dce49aac-03f3-48ed-9bad-c5eb2d779bae" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.601233] env[61998]: DEBUG nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 730.601414] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 730.601729] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1fb7c922-ecd6-4f02-85e2-79f8183e15b2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.610965] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14710975-6ce4-4609-aa57-fddc9363a714 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.633691] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dce49aac-03f3-48ed-9bad-c5eb2d779bae could not be found. 
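Note: the destroy sequence here (SearchIndex.FindAllByUuid lookup, WARNING, "Instance destroyed") shows the vmwareapi driver treating a VM missing on the backend as already deleted rather than as a fatal error, so network deallocation and claim cleanup still run. A rough sketch of that shape, with hypothetical session helpers standing in for the real vmops/oslo.vmware calls:

    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(session, instance_uuid):
        try:
            # find_vm_by_uuid/destroy_vm are hypothetical helpers; the real
            # driver resolves the VM via SearchIndex.FindAllByUuid as logged.
            vm_ref = session.find_vm_by_uuid(instance_uuid)
            session.destroy_vm(vm_ref)
        except InstanceNotFound as exc:
            # A VM missing on the backend is logged and then treated as
            # already destroyed, so the rest of the teardown still runs.
            LOG.warning("Instance does not exist on backend: %s", exc)
        LOG.debug("Instance destroyed")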
[ 730.633914] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 730.634184] env[61998]: INFO nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Took 0.03 seconds to destroy the instance on the hypervisor. [ 730.634448] env[61998]: DEBUG oslo.service.loopingcall [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.634918] env[61998]: DEBUG nova.compute.manager [-] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 730.635041] env[61998]: DEBUG nova.network.neutron [-] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 730.650153] env[61998]: DEBUG nova.network.neutron [-] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.719482] env[61998]: DEBUG nova.network.neutron [req-878844e7-7eed-4734-8617-87b8243a77c9 req-d71e97c3-f2b7-4136-811b-b119da3a62f7 service nova] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.806992] env[61998]: DEBUG nova.network.neutron [req-878844e7-7eed-4734-8617-87b8243a77c9 req-d71e97c3-f2b7-4136-811b-b119da3a62f7 service nova] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.935250] env[61998]: DEBUG nova.network.neutron [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Instance cache missing network info.
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.065522] env[61998]: DEBUG nova.network.neutron [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.132777] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfad5cf-4cb6-4558-82b4-04c6f3784a24 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.140420] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f0ced1-b739-4fa0-ac35-b7bbbe539a44 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.171053] env[61998]: DEBUG nova.network.neutron [-] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.171053] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec93e19e-ec7c-4d61-b6cf-6938269eec3a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.177796] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159c2c47-0dd2-485c-afb9-2c58eee42798 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.190806] env[61998]: DEBUG nova.compute.provider_tree [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.310771] env[61998]: DEBUG oslo_concurrency.lockutils [req-878844e7-7eed-4734-8617-87b8243a77c9 req-d71e97c3-f2b7-4136-811b-b119da3a62f7 service nova] Releasing lock "refresh_cache-87f859c6-7a96-4a48-adb8-814a134ad4c8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.311170] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Acquired lock "refresh_cache-87f859c6-7a96-4a48-adb8-814a134ad4c8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.311359] env[61998]: DEBUG nova.network.neutron [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 731.571443] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Releasing lock 
"refresh_cache-4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.571724] env[61998]: DEBUG nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 731.571955] env[61998]: DEBUG nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 731.572143] env[61998]: DEBUG nova.network.neutron [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 731.594263] env[61998]: DEBUG nova.network.neutron [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.674472] env[61998]: INFO nova.compute.manager [-] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Took 1.04 seconds to deallocate network for instance. 
[ 731.676743] env[61998]: DEBUG nova.compute.claims [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 731.676922] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.693870] env[61998]: DEBUG nova.scheduler.client.report [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 731.832407] env[61998]: DEBUG nova.network.neutron [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.963641] env[61998]: DEBUG nova.network.neutron [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.097090] env[61998]: DEBUG nova.network.neutron [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.199033] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.956s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.199672] env[61998]: ERROR nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada, please check neutron logs for more information. 
[ 732.199672] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Traceback (most recent call last): [ 732.199672] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 732.199672] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] self.driver.spawn(context, instance, image_meta, [ 732.199672] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 732.199672] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] self._vmops.spawn(context, instance, image_meta, injected_files, [ 732.199672] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 732.199672] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] vm_ref = self.build_virtual_machine(instance, [ 732.199672] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 732.199672] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] vif_infos = vmwarevif.get_vif_info(self._session, [ 732.199672] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] for vif in network_info: [ 732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] return self._sync_wrapper(fn, *args, **kwargs) [ 732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] self.wait() [ 732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] self[:] = self._gt.wait() [ 732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] return self._exit_event.wait() [ 732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] current.throw(*self._exc) [ 732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
732.200027] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] result = function(*args, **kwargs) [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] return func(*args, **kwargs) [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] raise e [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] nwinfo = self.network_api.allocate_for_instance( [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] created_port_ids = self._update_ports_for_instance( [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] with excutils.save_and_reraise_exception(): [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] self.force_reraise() [ 732.200398] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 732.200786] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] raise self.value [ 732.200786] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 732.200786] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] updated_port = self._update_port( [ 732.200786] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 732.200786] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] _ensure_no_port_binding_failure(port) [ 732.200786] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 732.200786] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] raise exception.PortBindingFailed(port_id=port['id']) [ 732.200786] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] nova.exception.PortBindingFailed: Binding failed for 
port d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada, please check neutron logs for more information. [ 732.200786] env[61998]: ERROR nova.compute.manager [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] [ 732.200786] env[61998]: DEBUG nova.compute.utils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Binding failed for port d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 732.201626] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.377s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.201870] env[61998]: DEBUG nova.objects.instance [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Lazy-loading 'resources' on Instance uuid f0a011bb-4939-4384-885c-6ce482875b4e {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 732.203120] env[61998]: DEBUG nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Build of instance f163fb1b-400f-4abb-8df6-0d9ea6449166 was re-scheduled: Binding failed for port d13dd1e8-fbf8-42c6-b8e7-140f0d6f7ada, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 732.203530] env[61998]: DEBUG nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 732.203750] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Acquiring lock "refresh_cache-f163fb1b-400f-4abb-8df6-0d9ea6449166" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.203897] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Acquired lock "refresh_cache-f163fb1b-400f-4abb-8df6-0d9ea6449166" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.204068] env[61998]: DEBUG nova.network.neutron [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 732.245365] env[61998]: DEBUG nova.compute.manager [req-0e32045d-b224-4385-8475-d6d019c11c68 req-dc5812de-9716-449a-89c9-b4bf78bbc537 service nova] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Received event network-vif-deleted-2e590a46-aa26-4965-bc67-bd8f267fdd6e {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 732.468025] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Releasing lock "refresh_cache-87f859c6-7a96-4a48-adb8-814a134ad4c8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.468025] env[61998]: DEBUG nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 732.468025] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 732.468202] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-658bf56e-efb1-42ec-a3e3-c3c5e259d03c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.477414] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d62a5a9-93dc-4baf-b509-75d3ed977d21 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.499461] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 87f859c6-7a96-4a48-adb8-814a134ad4c8 could not be found. [ 732.499689] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 732.500021] env[61998]: INFO nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Took 0.03 seconds to destroy the instance on the hypervisor. [ 732.501061] env[61998]: DEBUG oslo.service.loopingcall [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 732.501061] env[61998]: DEBUG nova.compute.manager [-] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 732.501061] env[61998]: DEBUG nova.network.neutron [-] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 732.532220] env[61998]: DEBUG nova.network.neutron [-] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.600204] env[61998]: INFO nova.compute.manager [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] [instance: 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e] Took 1.03 seconds to deallocate network for instance.
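Editor's note: the "Acquiring lock" / "acquired by ... :: waited Ns" / '"released" by ... :: held Ns' DEBUG records throughout this log are emitted by oslo.concurrency's lockutils helpers (the inner/lock frames at lockutils.py:402/407/421 and 310/313/331 above), which wrap a critical section and report both how long the caller waited for the lock and how long it held it. A minimal sketch of that pattern follows; it uses the real oslo_concurrency API, but the function and lock bodies are hypothetical stand-ins, not Nova's code:

    # Sketch of the locking pattern behind the lockutils DEBUG records above.
    # The lock names are taken from the log; the bodies are illustrative only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # One greenthread per process at a time; lockutils logs
        # "Acquiring lock ...", "... acquired ... :: waited Ns" and
        # '... "released" ... :: held Ns' around this call (the inner
        # frames at lockutils.py:402/407/421).
        pass

    # The same helper works as a context manager, which matches the
    # Acquiring/Acquired/Releasing lines for the per-instance
    # "refresh_cache-<uuid>" locks (lockutils.py:310/313/331):
    with lockutils.lock('refresh_cache-f163fb1b-400f-4abb-8df6-0d9ea6449166'):
        pass  # rebuild the instance's network info cache

The long waits visible here (e.g. "waited 16.377s" on "compute_resources") follow from every claim, abort, and usage update on the host serializing on that single lock name.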
[ 732.728305] env[61998]: DEBUG nova.network.neutron [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.879711] env[61998]: DEBUG nova.network.neutron [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.035094] env[61998]: DEBUG nova.network.neutron [-] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.131021] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928434ae-5891-4ee6-8026-7a0eb13a9ec9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.141019] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6078565-a5b6-4faf-8f2f-d1a330711dd9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.171090] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603f9abc-0cf2-40bc-a800-21a950ef4526 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.179325] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f67481-1df8-4373-918d-1bdd7c157c4f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.194315] env[61998]: DEBUG nova.compute.provider_tree [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.382711] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Releasing lock "refresh_cache-f163fb1b-400f-4abb-8df6-0d9ea6449166" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.382979] env[61998]: DEBUG nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 733.383231] env[61998]: DEBUG nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 733.383425] env[61998]: DEBUG nova.network.neutron [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 733.399725] env[61998]: DEBUG nova.network.neutron [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.538110] env[61998]: INFO nova.compute.manager [-] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Took 1.04 seconds to deallocate network for instance. [ 733.540398] env[61998]: DEBUG nova.compute.claims [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 733.540610] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.649864] env[61998]: INFO nova.scheduler.client.report [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Deleted allocations for instance 4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e [ 733.697248] env[61998]: DEBUG nova.scheduler.client.report [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 733.902171] env[61998]: DEBUG nova.network.neutron [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.161854] 
env[61998]: DEBUG oslo_concurrency.lockutils [None req-8c9239e0-5318-4d4a-8aed-757f97f207d9 tempest-ServerActionsTestJSON-997280155 tempest-ServerActionsTestJSON-997280155-project-member] Lock "4d167f54-c1fc-4eb0-a4cc-a5edb60fe78e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 149.680s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.202792] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.208016] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.442s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.209953] env[61998]: INFO nova.compute.claims [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.222646] env[61998]: INFO nova.scheduler.client.report [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Deleted allocations for instance f0a011bb-4939-4384-885c-6ce482875b4e [ 734.369463] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Acquiring lock "169437f2-fb18-4d5c-8d00-b82e9e5752d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.369711] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Lock "169437f2-fb18-4d5c-8d00-b82e9e5752d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.405240] env[61998]: INFO nova.compute.manager [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] [instance: f163fb1b-400f-4abb-8df6-0d9ea6449166] Took 1.02 seconds to deallocate network for instance. [ 734.668896] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Starting instance...
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 734.731138] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02f39a7f-6711-4beb-aa95-30b275b6e9f3 tempest-ServersAaction247Test-75876596 tempest-ServersAaction247Test-75876596-project-member] Lock "f0a011bb-4939-4384-885c-6ce482875b4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 22.588s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.199817] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.349714] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.349928] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.438934] env[61998]: INFO nova.scheduler.client.report [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Deleted allocations for instance f163fb1b-400f-4abb-8df6-0d9ea6449166 [ 735.609858] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448d0aca-b49e-4a0f-9888-448ee649d710 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.617584] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb65329f-fde1-4d28-a9e7-4616812ce7a3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.648143] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474d7f73-4932-4b68-b219-b3c375677252 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.656267] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17eaf164-6a51-4829-8cda-876e8b0d180b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.672021] env[61998]: DEBUG nova.compute.provider_tree [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.859152] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61998) run_periodic_tasks
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.859529] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Starting heal instance info cache {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10227}} [ 735.951961] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47719f06-76cb-4756-a560-7ec18d230570 tempest-InstanceActionsTestJSON-370054379 tempest-InstanceActionsTestJSON-370054379-project-member] Lock "f163fb1b-400f-4abb-8df6-0d9ea6449166" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 150.879s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.171573] env[61998]: DEBUG nova.scheduler.client.report [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 736.363328] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Didn't find any instances for network info cache update. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 736.363528] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.364323] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.364606] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.364772] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.364919] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.365088] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61998) run_periodic_tasks
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.365219] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61998) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10846}} [ 736.365369] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.455021] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 736.678062] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.678614] env[61998]: DEBUG nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 736.687661] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.496s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.868686] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.983934] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.187196] env[61998]: DEBUG nova.compute.utils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 737.194022] env[61998]: DEBUG nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] 
Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 737.194022] env[61998]: DEBUG nova.network.neutron [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 737.317243] env[61998]: DEBUG nova.policy [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25919d91b6fe4f31a85b4109149e261b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df09ba4531ae4b1e8e83f9b382b82c5c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 737.591017] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adb368e-6638-485d-bcce-8434c99deac2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.598773] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3db198-e479-427a-be7e-b41f1d7c54c6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.639403] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2818a0-e999-4578-9932-5ce096f08d66 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.646978] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5a3628-024b-47a3-b3ee-e7e9f945d9f3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.662457] env[61998]: DEBUG nova.compute.provider_tree [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.692598] env[61998]: DEBUG nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 738.104691] env[61998]: DEBUG nova.network.neutron [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Successfully created port: ae530f13-5c15-41af-927d-17c2acf2a6fa {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 738.165102] env[61998]: DEBUG nova.scheduler.client.report [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 738.673774] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.991s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.674909] env[61998]: ERROR nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 12a0f3b3-5d1a-4696-91f3-13095628d816, please check neutron logs for more information. 
[ 738.674909] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Traceback (most recent call last): [ 738.674909] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 738.674909] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] self.driver.spawn(context, instance, image_meta, [ 738.674909] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 738.674909] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] self._vmops.spawn(context, instance, image_meta, injected_files, [ 738.674909] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 738.674909] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] vm_ref = self.build_virtual_machine(instance, [ 738.674909] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 738.674909] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] vif_infos = vmwarevif.get_vif_info(self._session, [ 738.674909] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] for vif in network_info: [ 738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] return self._sync_wrapper(fn, *args, **kwargs) [ 738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] self.wait() [ 738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] self[:] = self._gt.wait() [ 738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] return self._exit_event.wait() [ 738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] current.throw(*self._exc) [ 738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
738.675332] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] result = function(*args, **kwargs) [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] return func(*args, **kwargs) [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] raise e [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] nwinfo = self.network_api.allocate_for_instance( [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] created_port_ids = self._update_ports_for_instance( [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] with excutils.save_and_reraise_exception(): [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] self.force_reraise() [ 738.675710] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.676091] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] raise self.value [ 738.676091] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 738.676091] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] updated_port = self._update_port( [ 738.676091] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.676091] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] _ensure_no_port_binding_failure(port) [ 738.676091] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.676091] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] raise exception.PortBindingFailed(port_id=port['id']) [ 738.676091] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] nova.exception.PortBindingFailed: Binding failed for 
port 12a0f3b3-5d1a-4696-91f3-13095628d816, please check neutron logs for more information. [ 738.676091] env[61998]: ERROR nova.compute.manager [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] [ 738.676091] env[61998]: DEBUG nova.compute.utils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Binding failed for port 12a0f3b3-5d1a-4696-91f3-13095628d816, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 738.677135] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.365s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.680324] env[61998]: DEBUG nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Build of instance 9025d114-10da-4cf8-9e5f-2520bfd3b246 was re-scheduled: Binding failed for port 12a0f3b3-5d1a-4696-91f3-13095628d816, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 738.680768] env[61998]: DEBUG nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 738.681063] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Acquiring lock "refresh_cache-9025d114-10da-4cf8-9e5f-2520bfd3b246" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.681219] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Acquired lock "refresh_cache-9025d114-10da-4cf8-9e5f-2520bfd3b246" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.681415] env[61998]: DEBUG nova.network.neutron [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 738.702897] env[61998]: DEBUG nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 738.746119] env[61998]: DEBUG nova.virt.hardware [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 738.746119] env[61998]: DEBUG nova.virt.hardware [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 738.746119] env[61998]: DEBUG nova.virt.hardware [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.746284] env[61998]: DEBUG nova.virt.hardware [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 738.746284] env[61998]: DEBUG nova.virt.hardware [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.746284] env[61998]: DEBUG nova.virt.hardware [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 738.746284] env[61998]: DEBUG nova.virt.hardware [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 738.746617] env[61998]: DEBUG nova.virt.hardware [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 738.748204] env[61998]: DEBUG
nova.virt.hardware [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 738.748204] env[61998]: DEBUG nova.virt.hardware [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 738.748204] env[61998]: DEBUG nova.virt.hardware [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 738.749259] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403ba2ca-0003-42ed-a185-4d70c5c0b5d2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.759019] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605a435f-115a-4174-9b46-50f820dc3ef6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.241147] env[61998]: DEBUG nova.network.neutron [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.389422] env[61998]: DEBUG nova.network.neutron [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.563790] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d842c283-c08b-428d-a305-7c4705fb8550 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.571186] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8225ea-e589-4c9c-88ac-a0961a04bbc2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.602839] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ed3f39-5a9c-451a-9b8f-36536a68a607 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.610390] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4276b911-deb3-4582-ae90-d1e10e9c2470 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.623701] env[61998]: DEBUG nova.compute.provider_tree [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.892626] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Releasing lock "refresh_cache-9025d114-10da-4cf8-9e5f-2520bfd3b246" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.893570] env[61998]: DEBUG nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 739.893570] env[61998]: DEBUG nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 739.893570] env[61998]: DEBUG nova.network.neutron [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 739.930523] env[61998]: DEBUG nova.network.neutron [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.946092] env[61998]: DEBUG nova.compute.manager [req-3240cded-fae5-40e0-861b-b65358d62624 req-04e895c5-99f6-401d-aca0-1828ecdbd7b4 service nova] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Received event network-changed-ae530f13-5c15-41af-927d-17c2acf2a6fa {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 739.946313] env[61998]: DEBUG nova.compute.manager [req-3240cded-fae5-40e0-861b-b65358d62624 req-04e895c5-99f6-401d-aca0-1828ecdbd7b4 service nova] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Refreshing instance network info cache due to event network-changed-ae530f13-5c15-41af-927d-17c2acf2a6fa. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 739.946538] env[61998]: DEBUG oslo_concurrency.lockutils [req-3240cded-fae5-40e0-861b-b65358d62624 req-04e895c5-99f6-401d-aca0-1828ecdbd7b4 service nova] Acquiring lock "refresh_cache-bc1ef57d-457d-446a-8ad4-3bab6d331215" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.946722] env[61998]: DEBUG oslo_concurrency.lockutils [req-3240cded-fae5-40e0-861b-b65358d62624 req-04e895c5-99f6-401d-aca0-1828ecdbd7b4 service nova] Acquired lock "refresh_cache-bc1ef57d-457d-446a-8ad4-3bab6d331215" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.946929] env[61998]: DEBUG nova.network.neutron [req-3240cded-fae5-40e0-861b-b65358d62624 req-04e895c5-99f6-401d-aca0-1828ecdbd7b4 service nova] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Refreshing network info cache for port ae530f13-5c15-41af-927d-17c2acf2a6fa {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 740.035331] env[61998]: ERROR nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ae530f13-5c15-41af-927d-17c2acf2a6fa, please check neutron logs for more information. 
[ 740.035331] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 740.035331] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 740.035331] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 740.035331] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.035331] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 740.035331] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.035331] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 740.035331] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.035331] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 740.035331] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.035331] env[61998]: ERROR nova.compute.manager raise self.value [ 740.035331] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.035331] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 740.035331] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.035331] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 740.035829] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.035829] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 740.035829] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ae530f13-5c15-41af-927d-17c2acf2a6fa, please check neutron logs for more information. 
[ 740.035829] env[61998]: ERROR nova.compute.manager [ 740.035829] env[61998]: Traceback (most recent call last): [ 740.035829] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 740.035829] env[61998]: listener.cb(fileno) [ 740.035829] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 740.035829] env[61998]: result = function(*args, **kwargs) [ 740.035829] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 740.035829] env[61998]: return func(*args, **kwargs) [ 740.035829] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 740.035829] env[61998]: raise e [ 740.035829] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 740.035829] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 740.035829] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.035829] env[61998]: created_port_ids = self._update_ports_for_instance( [ 740.035829] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.035829] env[61998]: with excutils.save_and_reraise_exception(): [ 740.035829] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.035829] env[61998]: self.force_reraise() [ 740.035829] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.035829] env[61998]: raise self.value [ 740.035829] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.035829] env[61998]: updated_port = self._update_port( [ 740.035829] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.035829] env[61998]: _ensure_no_port_binding_failure(port) [ 740.035829] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.035829] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 740.036693] env[61998]: nova.exception.PortBindingFailed: Binding failed for port ae530f13-5c15-41af-927d-17c2acf2a6fa, please check neutron logs for more information. [ 740.036693] env[61998]: Removing descriptor: 15 [ 740.036693] env[61998]: ERROR nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ae530f13-5c15-41af-927d-17c2acf2a6fa, please check neutron logs for more information. 
[ 740.036693] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Traceback (most recent call last): [ 740.036693] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 740.036693] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] yield resources [ 740.036693] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 740.036693] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] self.driver.spawn(context, instance, image_meta, [ 740.036693] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 740.036693] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] self._vmops.spawn(context, instance, image_meta, injected_files, [ 740.036693] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 740.036693] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] vm_ref = self.build_virtual_machine(instance, [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] vif_infos = vmwarevif.get_vif_info(self._session, [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] for vif in network_info: [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] return self._sync_wrapper(fn, *args, **kwargs) [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] self.wait() [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] self[:] = self._gt.wait() [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] return self._exit_event.wait() [ 740.037247] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 740.037683] env[61998]: ERROR 
nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] result = hub.switch() [ 740.037683] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 740.037683] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] return self.greenlet.switch() [ 740.037683] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 740.037683] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] result = function(*args, **kwargs) [ 740.037683] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 740.037683] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] return func(*args, **kwargs) [ 740.037683] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 740.037683] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] raise e [ 740.037683] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 740.037683] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] nwinfo = self.network_api.allocate_for_instance( [ 740.037683] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.037683] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] created_port_ids = self._update_ports_for_instance( [ 740.038196] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.038196] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] with excutils.save_and_reraise_exception(): [ 740.038196] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.038196] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] self.force_reraise() [ 740.038196] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.038196] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] raise self.value [ 740.038196] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.038196] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] updated_port = self._update_port( [ 740.038196] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.038196] 
env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] _ensure_no_port_binding_failure(port) [ 740.038196] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.038196] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] raise exception.PortBindingFailed(port_id=port['id']) [ 740.038582] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] nova.exception.PortBindingFailed: Binding failed for port ae530f13-5c15-41af-927d-17c2acf2a6fa, please check neutron logs for more information. [ 740.038582] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] [ 740.038582] env[61998]: INFO nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Terminating instance [ 740.039516] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "refresh_cache-bc1ef57d-457d-446a-8ad4-3bab6d331215" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.126465] env[61998]: DEBUG nova.scheduler.client.report [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 740.432772] env[61998]: DEBUG nova.network.neutron [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.526273] env[61998]: DEBUG nova.network.neutron [req-3240cded-fae5-40e0-861b-b65358d62624 req-04e895c5-99f6-401d-aca0-1828ecdbd7b4 service nova] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.632047] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.955s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.632678] env[61998]: ERROR nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 96270278-0740-4f05-9455-036a4f52f677, please check neutron logs for more information. [ 740.632678] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Traceback (most recent call last): [ 740.632678] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 740.632678] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] self.driver.spawn(context, instance, image_meta, [ 740.632678] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 740.632678] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] self._vmops.spawn(context, instance, image_meta, injected_files, [ 740.632678] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 740.632678] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] vm_ref = self.build_virtual_machine(instance, [ 740.632678] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 740.632678] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] vif_infos = vmwarevif.get_vif_info(self._session, [ 740.632678] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 740.633293] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] for vif in network_info: [ 740.633293] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 740.633293] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] return self._sync_wrapper(fn, *args, **kwargs) [ 740.633293] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 740.633293] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] self.wait() [ 740.633293] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 740.633293] 
env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] self[:] = self._gt.wait() [ 740.633293] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 740.633293] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] return self._exit_event.wait() [ 740.633293] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 740.633293] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] current.throw(*self._exc) [ 740.633293] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 740.633293] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] result = function(*args, **kwargs) [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] return func(*args, **kwargs) [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] raise e [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] nwinfo = self.network_api.allocate_for_instance( [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] created_port_ids = self._update_ports_for_instance( [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] with excutils.save_and_reraise_exception(): [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] self.force_reraise() [ 740.634296] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.634998] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] raise self.value [ 740.634998] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 740.634998] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] updated_port = self._update_port( [ 740.634998] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.634998] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] _ensure_no_port_binding_failure(port) [ 740.634998] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.634998] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] raise exception.PortBindingFailed(port_id=port['id']) [ 740.634998] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] nova.exception.PortBindingFailed: Binding failed for port 96270278-0740-4f05-9455-036a4f52f677, please check neutron logs for more information. [ 740.634998] env[61998]: ERROR nova.compute.manager [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] [ 740.634998] env[61998]: DEBUG nova.compute.utils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Binding failed for port 96270278-0740-4f05-9455-036a4f52f677, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 740.635464] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.618s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.638994] env[61998]: DEBUG nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Build of instance 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179 was re-scheduled: Binding failed for port 96270278-0740-4f05-9455-036a4f52f677, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 740.639448] env[61998]: DEBUG nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 740.639667] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Acquiring lock "refresh_cache-62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.639976] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Acquired lock "refresh_cache-62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.640172] env[61998]: DEBUG nova.network.neutron [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.678629] env[61998]: DEBUG nova.network.neutron [req-3240cded-fae5-40e0-861b-b65358d62624 req-04e895c5-99f6-401d-aca0-1828ecdbd7b4 service nova] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.936610] env[61998]: INFO nova.compute.manager [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] [instance: 9025d114-10da-4cf8-9e5f-2520bfd3b246] Took 1.04 seconds to deallocate network for instance. [ 741.175425] env[61998]: DEBUG nova.network.neutron [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.179265] env[61998]: DEBUG oslo_concurrency.lockutils [req-3240cded-fae5-40e0-861b-b65358d62624 req-04e895c5-99f6-401d-aca0-1828ecdbd7b4 service nova] Releasing lock "refresh_cache-bc1ef57d-457d-446a-8ad4-3bab6d331215" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.179631] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquired lock "refresh_cache-bc1ef57d-457d-446a-8ad4-3bab6d331215" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.180011] env[61998]: DEBUG nova.network.neutron [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.355862] env[61998]: DEBUG nova.network.neutron [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.543766] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbb007e-d96e-46b7-84ad-e9416c037964 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.552398] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6b5b00-590d-464c-a3e2-ed95e41d9c5a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.591080] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc5d0fd-b544-4ab0-a384-c472c42f0d13 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.598880] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc69390-2e30-404f-a4ca-0587c2e79708 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.613140] env[61998]: DEBUG nova.compute.provider_tree [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.710982] env[61998]: DEBUG nova.network.neutron [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.850274] env[61998]: DEBUG nova.network.neutron [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.858452] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Releasing lock "refresh_cache-62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.858919] env[61998]: DEBUG nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 741.859145] env[61998]: DEBUG nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 741.859320] env[61998]: DEBUG nova.network.neutron [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 741.875793] env[61998]: DEBUG nova.network.neutron [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.982464] env[61998]: INFO nova.scheduler.client.report [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Deleted allocations for instance 9025d114-10da-4cf8-9e5f-2520bfd3b246 [ 742.001072] env[61998]: DEBUG nova.compute.manager [req-d6c45cf0-b173-4425-a2d6-046df22d8401 req-de06976b-a459-4bad-83f7-4554ea6fc21d service nova] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Received event network-vif-deleted-ae530f13-5c15-41af-927d-17c2acf2a6fa {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 742.042507] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Acquiring lock "c55717f0-8ef2-4e55-b1cf-60f6faea9e5e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.042742] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Lock "c55717f0-8ef2-4e55-b1cf-60f6faea9e5e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.116678] env[61998]: DEBUG nova.scheduler.client.report [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 742.354348] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Releasing lock "refresh_cache-bc1ef57d-457d-446a-8ad4-3bab6d331215" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.354784] env[61998]: DEBUG nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 742.354980] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.355400] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a1837627-810c-42d9-9908-936fbacd6405 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.364855] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f370842-2d22-4cbf-a039-d3455e14d89b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.378243] env[61998]: DEBUG nova.network.neutron [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.388528] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bc1ef57d-457d-446a-8ad4-3bab6d331215 could not be found. [ 742.388752] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.388972] env[61998]: INFO nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Took 0.03 seconds to destroy the instance on the hypervisor. [ 742.389259] env[61998]: DEBUG oslo.service.loopingcall [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 742.390021] env[61998]: DEBUG nova.compute.manager [-] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 742.390123] env[61998]: DEBUG nova.network.neutron [-] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.409194] env[61998]: DEBUG nova.network.neutron [-] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.498400] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48658a3e-539d-49d0-bc04-d8186d33239f tempest-ServersTestBootFromVolume-304873699 tempest-ServersTestBootFromVolume-304873699-project-member] Lock "9025d114-10da-4cf8-9e5f-2520bfd3b246" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 153.691s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.622265] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.987s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.622885] env[61998]: ERROR nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d0ded777-e6e9-4fe3-84d7-e2da98acad08, please check neutron logs for more information. [ 742.622885] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Traceback (most recent call last): [ 742.622885] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 742.622885] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] self.driver.spawn(context, instance, image_meta, [ 742.622885] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 742.622885] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] self._vmops.spawn(context, instance, image_meta, injected_files, [ 742.622885] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 742.622885] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] vm_ref = self.build_virtual_machine(instance, [ 742.622885] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 742.622885] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] vif_infos = vmwarevif.get_vif_info(self._session, [ 742.622885] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] for vif in network_info: [ 742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] return self._sync_wrapper(fn, *args, **kwargs) [ 
742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] self.wait() [ 742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] self[:] = self._gt.wait() [ 742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] return self._exit_event.wait() [ 742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] current.throw(*self._exc) [ 742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 742.623284] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] result = function(*args, **kwargs) [ 742.623692] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 742.623692] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] return func(*args, **kwargs) [ 742.623692] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 742.623692] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] raise e [ 742.623692] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 742.623692] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] nwinfo = self.network_api.allocate_for_instance( [ 742.623692] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 742.623692] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] created_port_ids = self._update_ports_for_instance( [ 742.623692] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 742.623692] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] with excutils.save_and_reraise_exception(): [ 742.623692] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.623692] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] self.force_reraise() [ 742.623692] env[61998]: ERROR 
nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.624141] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] raise self.value [ 742.624141] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 742.624141] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] updated_port = self._update_port( [ 742.624141] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.624141] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] _ensure_no_port_binding_failure(port) [ 742.624141] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.624141] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] raise exception.PortBindingFailed(port_id=port['id']) [ 742.624141] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] nova.exception.PortBindingFailed: Binding failed for port d0ded777-e6e9-4fe3-84d7-e2da98acad08, please check neutron logs for more information. [ 742.624141] env[61998]: ERROR nova.compute.manager [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] [ 742.624141] env[61998]: DEBUG nova.compute.utils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Binding failed for port d0ded777-e6e9-4fe3-84d7-e2da98acad08, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 742.624924] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.421s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.626435] env[61998]: INFO nova.compute.claims [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.629349] env[61998]: DEBUG nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Build of instance 5f205b7d-d93e-436d-9d7d-04c6f767f7ad was re-scheduled: Binding failed for port d0ded777-e6e9-4fe3-84d7-e2da98acad08, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 742.629499] env[61998]: DEBUG nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 742.629638] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Acquiring lock "refresh_cache-5f205b7d-d93e-436d-9d7d-04c6f767f7ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.629775] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Acquired lock "refresh_cache-5f205b7d-d93e-436d-9d7d-04c6f767f7ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.633024] env[61998]: DEBUG nova.network.neutron [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 742.881354] env[61998]: INFO nova.compute.manager [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] [instance: 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179] Took 1.02 seconds to deallocate network for instance. [ 742.911891] env[61998]: DEBUG nova.network.neutron [-] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.003049] env[61998]: DEBUG nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 743.161027] env[61998]: DEBUG nova.network.neutron [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.275719] env[61998]: DEBUG nova.network.neutron [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.414360] env[61998]: INFO nova.compute.manager [-] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Took 1.02 seconds to deallocate network for instance. 
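Every Acquiring / Acquired (waited N.NNNs) / Releasing ("released" :: held N.NNNs) triple in these records is emitted by oslo.concurrency's lock helpers, not by Nova itself; the timings make lock contention visible directly in the log (e.g. the 14.618s wait on "compute_resources" earlier in this section). A standalone sketch of the two idioms behind those records, with lock names mirroring the ones above; everything else is generic oslo.concurrency usage, not code from this deployment:

    from oslo_concurrency import lockutils

    # Decorator form: all call sites sharing the lock name are serialized,
    # and the library DEBUG-logs how long the caller waited for the lock
    # and, on release, how long it was held -- the waited/held figures in
    # the records above come from this wrapper.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # critical section, e.g. ResourceTracker.instance_claim

    # Context-manager form, as used for the per-instance network-info
    # cache locks ("refresh_cache-<instance uuid>" in the records above).
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance_info_cache from Neutron here

    claim_resources()
    refresh_cache('bc1ef57d-457d-446a-8ad4-3bab6d331215')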
[ 743.416669] env[61998]: DEBUG nova.compute.claims [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 743.416850] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.537945] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.777731] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Releasing lock "refresh_cache-5f205b7d-d93e-436d-9d7d-04c6f767f7ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.777879] env[61998]: DEBUG nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 743.778692] env[61998]: DEBUG nova.compute.manager [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 743.778692] env[61998]: DEBUG nova.network.neutron [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 743.793911] env[61998]: DEBUG nova.network.neutron [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.915145] env[61998]: INFO nova.scheduler.client.report [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Deleted allocations for instance 62ebc97c-3f86-4ef2-b5c4-3d2fe563d179 [ 743.951701] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76257e66-3c49-4746-8456-ec280f2b2eae {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.960525] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e243f41e-68dd-4ead-ad80-0ac9bcd2a0f8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.992465] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113d2b58-fce4-49a2-987a-273c1fe2ee6c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.999505] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf23b129-8ed6-43e9-aa76-3ab892c1714f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.014769] env[61998]: DEBUG nova.compute.provider_tree [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.297280] env[61998]: DEBUG nova.network.neutron [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.425321] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2a2a9f4-92b9-4271-9c62-62b914c382af tempest-ServersV294TestFqdnHostnames-974670999 tempest-ServersV294TestFqdnHostnames-974670999-project-member] Lock "62ebc97c-3f86-4ef2-b5c4-3d2fe563d179" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 151.434s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.518310] env[61998]: DEBUG nova.scheduler.client.report [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 744.801407] env[61998]: INFO nova.compute.manager [None 
req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] [instance: 5f205b7d-d93e-436d-9d7d-04c6f767f7ad] Took 1.02 seconds to deallocate network for instance. [ 744.928367] env[61998]: DEBUG nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 745.024109] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.399s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.025374] env[61998]: DEBUG nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 745.029931] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.955s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.029931] env[61998]: INFO nova.compute.claims [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 745.452951] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.534377] env[61998]: DEBUG nova.compute.utils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 745.537609] env[61998]: DEBUG nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 745.537775] env[61998]: DEBUG nova.network.neutron [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 745.625168] env[61998]: DEBUG nova.policy [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '45e5c7148ac343ee8674cf6747d7df0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '102883704d52434591e74440e02262fb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 745.847568] env[61998]: INFO nova.scheduler.client.report [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Deleted allocations for instance 5f205b7d-d93e-436d-9d7d-04c6f767f7ad [ 746.004906] env[61998]: DEBUG nova.network.neutron [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Successfully created port: 7588c242-9de5-4d72-89db-ba1d0d17e49b {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 746.038610] env[61998]: DEBUG nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 746.361322] env[61998]: DEBUG oslo_concurrency.lockutils [None req-58544bc8-a608-43a4-9bb6-da598f7e4343 tempest-ServerActionsV293TestJSON-102737778 tempest-ServerActionsV293TestJSON-102737778-project-member] Lock "5f205b7d-d93e-436d-9d7d-04c6f767f7ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 151.173s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.409310] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c548a1-8a37-4013-85ee-3e960b010e01 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.419587] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5706d6-2b29-4457-b548-ca444ff63938 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.450601] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012ac646-9989-4401-aa26-9eabac6b1626 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.462516] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b67451b-65eb-4387-ba82-e70f6daddc93 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.473875] env[61998]: DEBUG nova.compute.provider_tree [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.865321] env[61998]: DEBUG nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 746.977563] env[61998]: DEBUG nova.scheduler.client.report [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 747.055153] env[61998]: DEBUG nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 747.084019] env[61998]: DEBUG nova.virt.hardware [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 747.084019] env[61998]: DEBUG nova.virt.hardware [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 747.084019] env[61998]: DEBUG nova.virt.hardware [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.084019] env[61998]: DEBUG nova.virt.hardware [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 747.084477] env[61998]: DEBUG nova.virt.hardware [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.085190] env[61998]: DEBUG nova.virt.hardware [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 747.085190] env[61998]: DEBUG nova.virt.hardware [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 747.085190] env[61998]: DEBUG nova.virt.hardware [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 747.085448] env[61998]: DEBUG nova.virt.hardware [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] 
Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 747.085511] env[61998]: DEBUG nova.virt.hardware [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 747.085686] env[61998]: DEBUG nova.virt.hardware [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 747.086568] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b73f23-6334-4825-9b96-121c3bba4987 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.094466] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2f8f52-6c50-4070-90d1-20aa82136915 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.313400] env[61998]: DEBUG nova.compute.manager [req-f476f836-26ad-4e6b-b45f-10ba12ea65fa req-b852855b-9037-4d1e-96d5-0aaca843eeeb service nova] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Received event network-changed-7588c242-9de5-4d72-89db-ba1d0d17e49b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 747.313589] env[61998]: DEBUG nova.compute.manager [req-f476f836-26ad-4e6b-b45f-10ba12ea65fa req-b852855b-9037-4d1e-96d5-0aaca843eeeb service nova] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Refreshing instance network info cache due to event network-changed-7588c242-9de5-4d72-89db-ba1d0d17e49b. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 747.313801] env[61998]: DEBUG oslo_concurrency.lockutils [req-f476f836-26ad-4e6b-b45f-10ba12ea65fa req-b852855b-9037-4d1e-96d5-0aaca843eeeb service nova] Acquiring lock "refresh_cache-a733a167-9713-43b7-bcc0-b0af47879ffc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.313942] env[61998]: DEBUG oslo_concurrency.lockutils [req-f476f836-26ad-4e6b-b45f-10ba12ea65fa req-b852855b-9037-4d1e-96d5-0aaca843eeeb service nova] Acquired lock "refresh_cache-a733a167-9713-43b7-bcc0-b0af47879ffc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.314112] env[61998]: DEBUG nova.network.neutron [req-f476f836-26ad-4e6b-b45f-10ba12ea65fa req-b852855b-9037-4d1e-96d5-0aaca843eeeb service nova] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Refreshing network info cache for port 7588c242-9de5-4d72-89db-ba1d0d17e49b {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 747.390778] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.487118] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.487659] env[61998]: DEBUG nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 747.490281] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.813s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.702432] env[61998]: ERROR nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7588c242-9de5-4d72-89db-ba1d0d17e49b, please check neutron logs for more information. 
[ 747.702432] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 747.702432] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 747.702432] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 747.702432] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 747.702432] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 747.702432] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 747.702432] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 747.702432] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.702432] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 747.702432] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.702432] env[61998]: ERROR nova.compute.manager raise self.value [ 747.702432] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 747.702432] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 747.702432] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.702432] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 747.703031] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.703031] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 747.703031] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7588c242-9de5-4d72-89db-ba1d0d17e49b, please check neutron logs for more information. 
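The traceback above, and the greenthread copy of it that follows, bottom out in _ensure_no_port_binding_failure at the cited nova/network/neutron.py:294: Neutron accepted the port create, but could not bind it to the compute host, so the port's binding:vif_type comes back as binding_failed and Nova converts that into PortBindingFailed. A minimal sketch of that check, paraphrased from the frames in the traceback (the 'binding_failed' constant is an assumption, matching Neutron's conventional failed-binding vif type):

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed failed-binding vif type value

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Neutron created/updated the port, but the ML2 mechanism driver could
        # not bind it to this host, so the vif type is 'binding_failed' rather
        # than a usable one such as 'ovs'.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])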
[ 747.703031] env[61998]: ERROR nova.compute.manager [ 747.703031] env[61998]: Traceback (most recent call last): [ 747.703031] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 747.703031] env[61998]: listener.cb(fileno) [ 747.703031] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 747.703031] env[61998]: result = function(*args, **kwargs) [ 747.703031] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 747.703031] env[61998]: return func(*args, **kwargs) [ 747.703031] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 747.703031] env[61998]: raise e [ 747.703031] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 747.703031] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 747.703031] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 747.703031] env[61998]: created_port_ids = self._update_ports_for_instance( [ 747.703031] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 747.703031] env[61998]: with excutils.save_and_reraise_exception(): [ 747.703031] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.703031] env[61998]: self.force_reraise() [ 747.703031] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.703031] env[61998]: raise self.value [ 747.703031] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 747.703031] env[61998]: updated_port = self._update_port( [ 747.703031] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.703031] env[61998]: _ensure_no_port_binding_failure(port) [ 747.703031] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.703031] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 747.703876] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 7588c242-9de5-4d72-89db-ba1d0d17e49b, please check neutron logs for more information. [ 747.703876] env[61998]: Removing descriptor: 15 [ 747.703876] env[61998]: ERROR nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7588c242-9de5-4d72-89db-ba1d0d17e49b, please check neutron logs for more information. 
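Interleaved with these error dumps, the oslo_concurrency records show the per-instance refresh_cache-<uuid> lock moving through its usual Acquiring / Acquired / Releasing sequence, first held by the external-event handler and then by the build path. A toy reproduction of that pattern with lockutils (the lock name mirrors the records; the body is a placeholder):

    from oslo_concurrency import lockutils

    instance_uuid = 'a733a167-9713-43b7-bcc0-b0af47879ffc'

    # lockutils.lock() is the context manager behind the Acquiring/Acquired/
    # Releasing lines in this log; it serializes network-info cache refreshes
    # so the event handler and the build path cannot interleave.
    with lockutils.lock(f'refresh_cache-{instance_uuid}'):
        pass  # refresh the instance network info cache here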
[ 747.703876] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Traceback (most recent call last): [ 747.703876] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 747.703876] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] yield resources [ 747.703876] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 747.703876] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] self.driver.spawn(context, instance, image_meta, [ 747.703876] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 747.703876] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 747.703876] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 747.703876] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] vm_ref = self.build_virtual_machine(instance, [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] vif_infos = vmwarevif.get_vif_info(self._session, [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] for vif in network_info: [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] return self._sync_wrapper(fn, *args, **kwargs) [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] self.wait() [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] self[:] = self._gt.wait() [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] return self._exit_event.wait() [ 747.704302] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 747.704702] env[61998]: ERROR 
nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] result = hub.switch() [ 747.704702] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 747.704702] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] return self.greenlet.switch() [ 747.704702] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 747.704702] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] result = function(*args, **kwargs) [ 747.704702] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 747.704702] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] return func(*args, **kwargs) [ 747.704702] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 747.704702] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] raise e [ 747.704702] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 747.704702] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] nwinfo = self.network_api.allocate_for_instance( [ 747.704702] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 747.704702] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] created_port_ids = self._update_ports_for_instance( [ 747.705193] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 747.705193] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] with excutils.save_and_reraise_exception(): [ 747.705193] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.705193] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] self.force_reraise() [ 747.705193] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.705193] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] raise self.value [ 747.705193] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 747.705193] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] updated_port = self._update_port( [ 747.705193] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.705193] 
env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] _ensure_no_port_binding_failure(port) [ 747.705193] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.705193] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] raise exception.PortBindingFailed(port_id=port['id']) [ 747.705560] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] nova.exception.PortBindingFailed: Binding failed for port 7588c242-9de5-4d72-89db-ba1d0d17e49b, please check neutron logs for more information. [ 747.705560] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] [ 747.705560] env[61998]: INFO nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Terminating instance [ 747.705560] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "refresh_cache-a733a167-9713-43b7-bcc0-b0af47879ffc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.832559] env[61998]: DEBUG nova.network.neutron [req-f476f836-26ad-4e6b-b45f-10ba12ea65fa req-b852855b-9037-4d1e-96d5-0aaca843eeeb service nova] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.992137] env[61998]: DEBUG nova.network.neutron [req-f476f836-26ad-4e6b-b45f-10ba12ea65fa req-b852855b-9037-4d1e-96d5-0aaca843eeeb service nova] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.995535] env[61998]: DEBUG nova.compute.utils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 747.996049] env[61998]: DEBUG nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 747.996049] env[61998]: DEBUG nova.network.neutron [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 748.084551] env[61998]: DEBUG nova.policy [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ea7bc069037447c4bd9451e0f9106689', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad0aa62c68274ebd9dc50e07a08a3e76', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 748.293122] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3301d47-464c-455c-9a07-08451af6f49f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.298308] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b81fb4f-fb3c-4178-98f8-0b9851080449 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.329788] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b368d1a6-e5b1-41a9-a011-9c45dd8dbaf0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.337586] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34572749-09c0-4bf9-9e2e-a9fd758850d3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.351031] env[61998]: DEBUG nova.compute.provider_tree [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.494552] env[61998]: DEBUG oslo_concurrency.lockutils [req-f476f836-26ad-4e6b-b45f-10ba12ea65fa req-b852855b-9037-4d1e-96d5-0aaca843eeeb service nova] Releasing lock "refresh_cache-a733a167-9713-43b7-bcc0-b0af47879ffc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.494958] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquired lock "refresh_cache-a733a167-9713-43b7-bcc0-b0af47879ffc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.495161] env[61998]: DEBUG nova.network.neutron [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: 
a733a167-9713-43b7-bcc0-b0af47879ffc] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.502312] env[61998]: DEBUG nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 748.654022] env[61998]: DEBUG nova.network.neutron [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Successfully created port: b740a604-956e-479c-91d9-b4de2a6b8835 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.857192] env[61998]: DEBUG nova.scheduler.client.report [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 749.021409] env[61998]: DEBUG nova.network.neutron [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.367316] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.877s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.368360] env[61998]: ERROR nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 88a38d4c-6e6e-44b6-bd38-68673f87c6a7, please check neutron logs for more information. 
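For reference, the "Inventory has not changed" records above carry the provider's full inventory; placement treats the schedulable capacity of each resource class as (total - reserved) * allocation_ratio. Checking that against the dict logged for provider c8c34fc8:

    # Effective capacity implied by the inventory dict in the records above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0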
[ 749.368360] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Traceback (most recent call last): [ 749.368360] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 749.368360] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] self.driver.spawn(context, instance, image_meta, [ 749.368360] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 749.368360] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 749.368360] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 749.368360] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] vm_ref = self.build_virtual_machine(instance, [ 749.368360] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 749.368360] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] vif_infos = vmwarevif.get_vif_info(self._session, [ 749.368360] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] for vif in network_info: [ 749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] return self._sync_wrapper(fn, *args, **kwargs) [ 749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] self.wait() [ 749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] self[:] = self._gt.wait() [ 749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] return self._exit_event.wait() [ 749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] result = hub.switch() [ 749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
749.368762] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] return self.greenlet.switch() [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] result = function(*args, **kwargs) [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] return func(*args, **kwargs) [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] raise e [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] nwinfo = self.network_api.allocate_for_instance( [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] created_port_ids = self._update_ports_for_instance( [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] with excutils.save_and_reraise_exception(): [ 749.369212] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.370052] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] self.force_reraise() [ 749.370052] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.370052] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] raise self.value [ 749.370052] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.370052] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] updated_port = self._update_port( [ 749.370052] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.370052] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] _ensure_no_port_binding_failure(port) [ 749.370052] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 749.370052] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] raise exception.PortBindingFailed(port_id=port['id']) [ 749.370052] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] nova.exception.PortBindingFailed: Binding failed for port 88a38d4c-6e6e-44b6-bd38-68673f87c6a7, please check neutron logs for more information. [ 749.370052] env[61998]: ERROR nova.compute.manager [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] [ 749.370445] env[61998]: DEBUG nova.compute.utils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Binding failed for port 88a38d4c-6e6e-44b6-bd38-68673f87c6a7, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 749.371436] env[61998]: DEBUG nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Build of instance dce49aac-03f3-48ed-9bad-c5eb2d779bae was re-scheduled: Binding failed for port 88a38d4c-6e6e-44b6-bd38-68673f87c6a7, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 749.374051] env[61998]: DEBUG nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 749.374051] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Acquiring lock "refresh_cache-dce49aac-03f3-48ed-9bad-c5eb2d779bae" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.374051] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Acquired lock "refresh_cache-dce49aac-03f3-48ed-9bad-c5eb2d779bae" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.374051] env[61998]: DEBUG nova.network.neutron [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.374051] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.833s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.379540] env[61998]: DEBUG nova.network.neutron [None 
req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.384590] env[61998]: DEBUG nova.compute.manager [req-b96582b7-0014-449a-9948-6dea7986b1ec req-edf2f3e2-d936-4094-8544-38ac277ca1a4 service nova] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Received event network-vif-deleted-7588c242-9de5-4d72-89db-ba1d0d17e49b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 749.512907] env[61998]: DEBUG nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 749.548072] env[61998]: DEBUG nova.virt.hardware [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 749.548072] env[61998]: DEBUG nova.virt.hardware [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 749.548072] env[61998]: DEBUG nova.virt.hardware [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.548409] env[61998]: DEBUG nova.virt.hardware [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 749.548409] env[61998]: DEBUG nova.virt.hardware [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.548409] env[61998]: DEBUG nova.virt.hardware [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 
tempest-VolumesAdminNegativeTest-194489036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 749.549648] env[61998]: DEBUG nova.virt.hardware [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 749.549883] env[61998]: DEBUG nova.virt.hardware [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 749.550131] env[61998]: DEBUG nova.virt.hardware [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 749.550554] env[61998]: DEBUG nova.virt.hardware [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 749.550771] env[61998]: DEBUG nova.virt.hardware [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 749.551974] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8a5cc3-ae02-433c-8489-154da48c2957 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.562042] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf02105-3525-4566-b9c9-d7ff7ecfd832 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.886020] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Releasing lock "refresh_cache-a733a167-9713-43b7-bcc0-b0af47879ffc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.886333] env[61998]: DEBUG nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 749.886608] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 749.887160] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b02f8f56-9e05-4f65-b75e-852c8457cd0f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.899375] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fe8297-2e9e-4b53-b2b0-2de2a9c57c4d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.914802] env[61998]: ERROR nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b740a604-956e-479c-91d9-b4de2a6b8835, please check neutron logs for more information. [ 749.914802] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 749.914802] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 749.914802] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 749.914802] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.914802] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 749.914802] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.914802] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 749.914802] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.914802] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 749.914802] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.914802] env[61998]: ERROR nova.compute.manager raise self.value [ 749.914802] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.914802] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 749.914802] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.914802] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 749.915363] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.915363] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 749.915363] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b740a604-956e-479c-91d9-b4de2a6b8835, please check neutron logs for more information. 
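As with instance a733a167 above, the failure is raised inside the greenthread that _allocate_network_async spawned, then re-raised into the build path the first time get_vif_info iterates network_info and the async wrapper's wait() runs; that is why the same PortBindingFailed appears once from the allocation thread and again as "Instance failed to spawn". A toy reduction of that pattern (names and the exception are placeholders, not Nova's):

    import eventlet

    def allocate_network():
        # Stand-in for _allocate_network_async: the Neutron call fails here,
        # in the background, while block device mapping continues in parallel.
        raise RuntimeError('binding failed')

    gt = eventlet.spawn(allocate_network)

    # ... "Start building block device mappings" / "Start spawning" happen here ...

    try:
        # The first access to network_info triggers the wrapper's wait(),
        # which re-raises the exception stored by the greenthread.
        gt.wait()
    except RuntimeError as exc:
        print('Instance failed to spawn:', exc)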
[ 749.915363] env[61998]: ERROR nova.compute.manager [ 749.915707] env[61998]: Traceback (most recent call last): [ 749.915802] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 749.915802] env[61998]: listener.cb(fileno) [ 749.915888] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 749.915888] env[61998]: result = function(*args, **kwargs) [ 749.915966] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 749.915966] env[61998]: return func(*args, **kwargs) [ 749.916059] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 749.916059] env[61998]: raise e [ 749.916138] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 749.916138] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 749.916212] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.916212] env[61998]: created_port_ids = self._update_ports_for_instance( [ 749.916292] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.916292] env[61998]: with excutils.save_and_reraise_exception(): [ 749.916366] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.916366] env[61998]: self.force_reraise() [ 749.916436] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.916436] env[61998]: raise self.value [ 749.916506] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.916506] env[61998]: updated_port = self._update_port( [ 749.916575] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.916575] env[61998]: _ensure_no_port_binding_failure(port) [ 749.916644] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.916644] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 749.916727] env[61998]: nova.exception.PortBindingFailed: Binding failed for port b740a604-956e-479c-91d9-b4de2a6b8835, please check neutron logs for more information. [ 749.916777] env[61998]: Removing descriptor: 15 [ 749.917691] env[61998]: DEBUG nova.network.neutron [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.922491] env[61998]: ERROR nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b740a604-956e-479c-91d9-b4de2a6b8835, please check neutron logs for more information. 
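The nova.virt.hardware records above show the standard topology search for this flavor: with flavor and image limits of 0:0:0, the maxima default to 65536 sockets/cores/threads, and a 1-vCPU m1.nano admits exactly one factorization, VirtCPUTopology(cores=1,sockets=1,threads=1). A simplified sketch of that enumeration (Nova's real search also applies preferred topologies and ordering):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product equals the
        # vCPU count, within the limits -- the search reported in the records.
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -> "Got 1 possible topologies"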
[ 749.922491] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Traceback (most recent call last): [ 749.922491] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 749.922491] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] yield resources [ 749.922491] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 749.922491] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] self.driver.spawn(context, instance, image_meta, [ 749.922491] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 749.922491] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] self._vmops.spawn(context, instance, image_meta, injected_files, [ 749.922491] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 749.922491] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] vm_ref = self.build_virtual_machine(instance, [ 749.922491] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 749.922954] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] vif_infos = vmwarevif.get_vif_info(self._session, [ 749.922954] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 749.922954] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] for vif in network_info: [ 749.922954] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 749.922954] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] return self._sync_wrapper(fn, *args, **kwargs) [ 749.922954] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 749.922954] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] self.wait() [ 749.922954] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 749.922954] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] self[:] = self._gt.wait() [ 749.922954] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 749.922954] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] return self._exit_event.wait() [ 749.922954] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 749.922954] env[61998]: ERROR 
nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] result = hub.switch() [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] return self.greenlet.switch() [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] result = function(*args, **kwargs) [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] return func(*args, **kwargs) [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] raise e [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] nwinfo = self.network_api.allocate_for_instance( [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] created_port_ids = self._update_ports_for_instance( [ 749.923414] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.923887] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] with excutils.save_and_reraise_exception(): [ 749.923887] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.923887] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] self.force_reraise() [ 749.923887] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.923887] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] raise self.value [ 749.923887] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.923887] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] updated_port = self._update_port( [ 749.923887] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.923887] 
env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] _ensure_no_port_binding_failure(port) [ 749.923887] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.923887] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] raise exception.PortBindingFailed(port_id=port['id']) [ 749.923887] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] nova.exception.PortBindingFailed: Binding failed for port b740a604-956e-479c-91d9-b4de2a6b8835, please check neutron logs for more information. [ 749.923887] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] [ 749.926667] env[61998]: INFO nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Terminating instance [ 749.927492] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquiring lock "refresh_cache-56e74975-e4fa-4ff8-ab87-aa74125dab78" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.927777] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquired lock "refresh_cache-56e74975-e4fa-4ff8-ab87-aa74125dab78" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.928070] env[61998]: DEBUG nova.network.neutron [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.942593] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a733a167-9713-43b7-bcc0-b0af47879ffc could not be found. [ 749.942982] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 749.943409] env[61998]: INFO nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Took 0.06 seconds to destroy the instance on the hypervisor. [ 749.943795] env[61998]: DEBUG oslo.service.loopingcall [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 749.947541] env[61998]: DEBUG nova.compute.manager [-] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 749.947794] env[61998]: DEBUG nova.network.neutron [-] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 749.986220] env[61998]: DEBUG nova.network.neutron [-] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.029246] env[61998]: DEBUG nova.network.neutron [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.243803] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d905beb-7ce7-48bf-9dc3-4c014642ad7d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.252603] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b0a2f3-1e9c-4951-8d4f-5894e0658f71 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.287962] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56dcad3-5207-445b-83f2-28bf38ab15f1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.295323] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c4abb6-c80d-4e26-903e-f2579205379a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.308680] env[61998]: DEBUG nova.compute.provider_tree [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.447253] env[61998]: DEBUG nova.network.neutron [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.489075] env[61998]: DEBUG nova.network.neutron [-] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.531590] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Releasing lock "refresh_cache-dce49aac-03f3-48ed-9bad-c5eb2d779bae" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.531903] env[61998]: DEBUG nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 750.532098] env[61998]: DEBUG nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 750.532326] env[61998]: DEBUG nova.network.neutron [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 750.558195] env[61998]: DEBUG nova.network.neutron [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.630952] env[61998]: DEBUG nova.network.neutron [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.816062] env[61998]: DEBUG nova.scheduler.client.report [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 750.993407] env[61998]: INFO nova.compute.manager [-] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Took 1.05 seconds to deallocate network for instance. 
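[Annotation] The inventory dict logged just above is what the scheduler report client compares against Placement before concluding nothing changed. A quick worked reading of those numbers, assuming Placement's documented capacity formula, (total - reserved) * allocation_ratio, with max_unit capping any single allocation:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16,
                 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 175,
                    'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: %d consumable, at most %d per allocation'
              % (rc, capacity, inv['max_unit']))
    # VCPU: 192 consumable, at most 16 per allocation
    # MEMORY_MB: 196078 consumable, at most 65530 per allocation
    # DISK_GB: 400 consumable, at most 175 per allocation

So the 4.0 VCPU allocation_ratio is what lets this 48-thread node advertise 192 schedulable vCPUs to the many small tempest guests being built here.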
[ 750.996640] env[61998]: DEBUG nova.compute.claims [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 750.996640] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.061348] env[61998]: DEBUG nova.network.neutron [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.133705] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Releasing lock "refresh_cache-56e74975-e4fa-4ff8-ab87-aa74125dab78" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.134233] env[61998]: DEBUG nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 751.134483] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 751.134818] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-606e3b2a-d815-4bef-8583-2860c4ec5a07 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.144383] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c63c832-2769-4c50-9735-7e3f8e3b7b45 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.167540] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 56e74975-e4fa-4ff8-ab87-aa74125dab78 could not be found. 
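[Annotation] The Acquiring/Acquired/Releasing records here, and the "waited N.NNNs" / "held N.NNNs" timings throughout this log, are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the two forms nova uses for the locks shown above:

    from oslo_concurrency import lockutils

    # Decorator form: the resource tracker serializes claim work on one
    # "compute_resources" lock, which is why abort_instance_claim is logged
    # as waiting on it while another claim is in flight.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        pass  # drop the instance's resource claim under the lock

    abort_instance_claim()

    # Context-manager form: per-instance "refresh_cache-<uuid>" locks ensure
    # only one greenthread rebuilds a given instance's network info cache.
    with lockutils.lock('refresh_cache-56e74975-e4fa-4ff8-ab87-aa74125dab78'):
        pass  # rebuild the instance's network info cache here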
[ 751.167761] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.167952] env[61998]: INFO nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Took 0.03 seconds to destroy the instance on the hypervisor. [ 751.168200] env[61998]: DEBUG oslo.service.loopingcall [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.168425] env[61998]: DEBUG nova.compute.manager [-] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 751.168518] env[61998]: DEBUG nova.network.neutron [-] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 751.187078] env[61998]: DEBUG nova.network.neutron [-] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.321532] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.948s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.322132] env[61998]: ERROR nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2e590a46-aa26-4965-bc67-bd8f267fdd6e, please check neutron logs for more information. 
[ 751.322132] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Traceback (most recent call last): [ 751.322132] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 751.322132] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] self.driver.spawn(context, instance, image_meta, [ 751.322132] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 751.322132] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 751.322132] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 751.322132] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] vm_ref = self.build_virtual_machine(instance, [ 751.322132] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 751.322132] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] vif_infos = vmwarevif.get_vif_info(self._session, [ 751.322132] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] for vif in network_info: [ 751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] return self._sync_wrapper(fn, *args, **kwargs) [ 751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] self.wait() [ 751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] self[:] = self._gt.wait() [ 751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] return self._exit_event.wait() [ 751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] result = hub.switch() [ 751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
751.322515] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] return self.greenlet.switch() [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] result = function(*args, **kwargs) [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] return func(*args, **kwargs) [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] raise e [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] nwinfo = self.network_api.allocate_for_instance( [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] created_port_ids = self._update_ports_for_instance( [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] with excutils.save_and_reraise_exception(): [ 751.322961] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 751.323444] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] self.force_reraise() [ 751.323444] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 751.323444] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] raise self.value [ 751.323444] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 751.323444] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] updated_port = self._update_port( [ 751.323444] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 751.323444] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] _ensure_no_port_binding_failure(port) [ 751.323444] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 751.323444] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] raise exception.PortBindingFailed(port_id=port['id']) [ 751.323444] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] nova.exception.PortBindingFailed: Binding failed for port 2e590a46-aa26-4965-bc67-bd8f267fdd6e, please check neutron logs for more information. [ 751.323444] env[61998]: ERROR nova.compute.manager [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] [ 751.323851] env[61998]: DEBUG nova.compute.utils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Binding failed for port 2e590a46-aa26-4965-bc67-bd8f267fdd6e, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 751.324080] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.124s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.325944] env[61998]: INFO nova.compute.claims [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 751.328403] env[61998]: DEBUG nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Build of instance 87f859c6-7a96-4a48-adb8-814a134ad4c8 was re-scheduled: Binding failed for port 2e590a46-aa26-4965-bc67-bd8f267fdd6e, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 751.328822] env[61998]: DEBUG nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 751.329057] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Acquiring lock "refresh_cache-87f859c6-7a96-4a48-adb8-814a134ad4c8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.329223] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Acquired lock "refresh_cache-87f859c6-7a96-4a48-adb8-814a134ad4c8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.329363] env[61998]: DEBUG nova.network.neutron [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 751.412762] env[61998]: DEBUG nova.compute.manager [req-7a3c42b4-8d84-4c9e-9b0a-6ac1ae61b657 req-1ef5c4d9-42e6-4bdf-a2cb-86eb1d6405a2 service nova] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Received event network-changed-b740a604-956e-479c-91d9-b4de2a6b8835 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 751.412995] env[61998]: DEBUG nova.compute.manager [req-7a3c42b4-8d84-4c9e-9b0a-6ac1ae61b657 req-1ef5c4d9-42e6-4bdf-a2cb-86eb1d6405a2 service nova] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Refreshing instance network info cache due to event network-changed-b740a604-956e-479c-91d9-b4de2a6b8835. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 751.413216] env[61998]: DEBUG oslo_concurrency.lockutils [req-7a3c42b4-8d84-4c9e-9b0a-6ac1ae61b657 req-1ef5c4d9-42e6-4bdf-a2cb-86eb1d6405a2 service nova] Acquiring lock "refresh_cache-56e74975-e4fa-4ff8-ab87-aa74125dab78" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.413404] env[61998]: DEBUG oslo_concurrency.lockutils [req-7a3c42b4-8d84-4c9e-9b0a-6ac1ae61b657 req-1ef5c4d9-42e6-4bdf-a2cb-86eb1d6405a2 service nova] Acquired lock "refresh_cache-56e74975-e4fa-4ff8-ab87-aa74125dab78" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.413587] env[61998]: DEBUG nova.network.neutron [req-7a3c42b4-8d84-4c9e-9b0a-6ac1ae61b657 req-1ef5c4d9-42e6-4bdf-a2cb-86eb1d6405a2 service nova] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Refreshing network info cache for port b740a604-956e-479c-91d9-b4de2a6b8835 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.567679] env[61998]: INFO nova.compute.manager [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] [instance: dce49aac-03f3-48ed-9bad-c5eb2d779bae] Took 1.03 seconds to deallocate network for instance. [ 751.689866] env[61998]: DEBUG nova.network.neutron [-] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.854077] env[61998]: DEBUG nova.network.neutron [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.940553] env[61998]: DEBUG nova.network.neutron [req-7a3c42b4-8d84-4c9e-9b0a-6ac1ae61b657 req-1ef5c4d9-42e6-4bdf-a2cb-86eb1d6405a2 service nova] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.984275] env[61998]: DEBUG nova.network.neutron [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.054490] env[61998]: DEBUG nova.network.neutron [req-7a3c42b4-8d84-4c9e-9b0a-6ac1ae61b657 req-1ef5c4d9-42e6-4bdf-a2cb-86eb1d6405a2 service nova] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.192658] env[61998]: INFO nova.compute.manager [-] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Took 1.02 seconds to deallocate network for instance. 
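[Annotation] The req-7a3c42b4-.../req-1ef5c4d9-... records above are nova-compute processing a notification that Neutron pushed through nova's os-server-external-events API; "network-changed-b740a604-..." is just the event name plus the port id used as the event tag. Roughly what Neutron's nova notifier sends (sketch; NOVA_API and TOKEN are hypothetical placeholders, auth and error handling elided):

    import requests

    NOVA_API = 'http://controller/compute/v2.1'  # hypothetical endpoint
    TOKEN = '...'                                # keystone token, elided

    body = {'events': [{
        'name': 'network-changed',
        'server_uuid': '56e74975-e4fa-4ff8-ab87-aa74125dab78',
        'tag': 'b740a604-956e-479c-91d9-b4de2a6b8835',  # the port id
    }]}
    requests.post(NOVA_API + '/os-server-external-events',
                  headers={'X-Auth-Token': TOKEN}, json=body)

On receipt, nova looks up the instance, takes the refresh_cache lock seen above, and refreshes the network info cache for that one port.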
[ 752.195087] env[61998]: DEBUG nova.compute.claims [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 752.195287] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.486403] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Releasing lock "refresh_cache-87f859c6-7a96-4a48-adb8-814a134ad4c8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.486649] env[61998]: DEBUG nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 752.486826] env[61998]: DEBUG nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 752.486992] env[61998]: DEBUG nova.network.neutron [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 752.503917] env[61998]: DEBUG nova.network.neutron [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.557255] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "b3a3bb81-843b-4227-bebf-a8079f98c0f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.557482] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "b3a3bb81-843b-4227-bebf-a8079f98c0f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.558077] env[61998]: DEBUG oslo_concurrency.lockutils [req-7a3c42b4-8d84-4c9e-9b0a-6ac1ae61b657 req-1ef5c4d9-42e6-4bdf-a2cb-86eb1d6405a2 service nova] Releasing lock "refresh_cache-56e74975-e4fa-4ff8-ab87-aa74125dab78" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.558314] env[61998]: DEBUG nova.compute.manager [req-7a3c42b4-8d84-4c9e-9b0a-6ac1ae61b657 req-1ef5c4d9-42e6-4bdf-a2cb-86eb1d6405a2 service nova] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Received event network-vif-deleted-b740a604-956e-479c-91d9-b4de2a6b8835 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 752.594079] env[61998]: INFO nova.scheduler.client.report [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Deleted allocations for instance dce49aac-03f3-48ed-9bad-c5eb2d779bae [ 752.604488] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47be9865-43cc-4b03-882f-d9bf1437b9a0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.615172] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8719a3b2-edd7-4f6d-858e-1e0e15d448f2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.648210] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342877e4-a4b6-4553-8a52-328a91414ce9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.656831] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac28974-e8fb-423a-bc87-0b8158daeb59 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.677975] env[61998]: DEBUG nova.compute.provider_tree [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.007939] env[61998]: DEBUG nova.network.neutron [None 
req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.108064] env[61998]: DEBUG oslo_concurrency.lockutils [None req-aa6c2ec3-d739-4bac-87f0-ebbad3d9388c tempest-ServersTestFqdnHostnames-871339318 tempest-ServersTestFqdnHostnames-871339318-project-member] Lock "dce49aac-03f3-48ed-9bad-c5eb2d779bae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 149.748s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.181699] env[61998]: DEBUG nova.scheduler.client.report [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 753.512749] env[61998]: INFO nova.compute.manager [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] [instance: 87f859c6-7a96-4a48-adb8-814a134ad4c8] Took 1.03 seconds to deallocate network for instance. [ 753.614199] env[61998]: DEBUG nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 753.691017] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.691017] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 753.697664] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.829s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.697985] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.698072] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61998) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 753.700783] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.715s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.701436] env[61998]: INFO nova.compute.claims [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.704819] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d6fc0c-85f5-4c49-b02b-5e95d2b62a9a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.713798] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d709237c-8662-4493-b094-b91c7095db34 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.727694] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e562c3e9-3e37-42e3-948a-547b421fc26e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.735594] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e49a1f-eaad-472d-b7c4-2875a151b456 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.767144] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181361MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61998) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 753.767309] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.137812] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.205989] env[61998]: DEBUG nova.compute.utils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 754.207974] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 754.207974] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 754.271867] env[61998]: DEBUG nova.policy [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5f2c10ceb02461095c74c83d4a745d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e4e8f3e000f4c2383b3a39f24499577', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 754.555044] env[61998]: INFO nova.scheduler.client.report [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Deleted allocations for instance 87f859c6-7a96-4a48-adb8-814a134ad4c8 [ 754.704243] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Successfully created port: 0ff860cb-d237-47e4-bd6b-857bd5a53937 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 754.710847] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 755.008796] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acfc62d2-258a-4acd-a03e-3269429445eb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.019020] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eec02c9-0b35-4682-977d-cda7f1a00e26 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.049234] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58dc72f5-8e06-4539-b5e2-a138a8669cbf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.057968] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4068e177-590e-4a47-81e0-965462524362 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.071396] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7cc050c0-893b-4f4f-b161-fd7f83fef46d tempest-AttachInterfacesUnderV243Test-1386038916 tempest-AttachInterfacesUnderV243Test-1386038916-project-member] Lock "87f859c6-7a96-4a48-adb8-814a134ad4c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.678s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.071880] env[61998]: DEBUG nova.compute.provider_tree [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.576522] env[61998]: DEBUG nova.scheduler.client.report [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 755.581122] env[61998]: DEBUG nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 755.723510] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 755.752254] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 755.752500] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 755.752652] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 755.752829] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 755.752970] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 755.753173] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 755.753407] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 755.753567] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 755.753732] env[61998]: DEBUG nova.virt.hardware [None 
req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 755.753889] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 755.754080] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 755.754942] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80c1fa8-ce0c-44a8-a150-2369db624fd1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.764564] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d619244-b4d6-4d09-af65-830bd41e5dfd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.844319] env[61998]: DEBUG nova.compute.manager [req-50587183-4ec4-4dc2-b09f-fbceb1e1853f req-d2814f06-77fd-4af4-b05a-8a8304326168 service nova] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Received event network-changed-0ff860cb-d237-47e4-bd6b-857bd5a53937 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 755.844461] env[61998]: DEBUG nova.compute.manager [req-50587183-4ec4-4dc2-b09f-fbceb1e1853f req-d2814f06-77fd-4af4-b05a-8a8304326168 service nova] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Refreshing instance network info cache due to event network-changed-0ff860cb-d237-47e4-bd6b-857bd5a53937. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 755.844662] env[61998]: DEBUG oslo_concurrency.lockutils [req-50587183-4ec4-4dc2-b09f-fbceb1e1853f req-d2814f06-77fd-4af4-b05a-8a8304326168 service nova] Acquiring lock "refresh_cache-e632307a-ffe9-45a6-9224-8598aea5d269" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.844795] env[61998]: DEBUG oslo_concurrency.lockutils [req-50587183-4ec4-4dc2-b09f-fbceb1e1853f req-d2814f06-77fd-4af4-b05a-8a8304326168 service nova] Acquired lock "refresh_cache-e632307a-ffe9-45a6-9224-8598aea5d269" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.844945] env[61998]: DEBUG nova.network.neutron [req-50587183-4ec4-4dc2-b09f-fbceb1e1853f req-d2814f06-77fd-4af4-b05a-8a8304326168 service nova] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Refreshing network info cache for port 0ff860cb-d237-47e4-bd6b-857bd5a53937 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 756.084890] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.386s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.085564] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 756.088259] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.671s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.123685] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.246760] env[61998]: ERROR nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0ff860cb-d237-47e4-bd6b-857bd5a53937, please check neutron logs for more information. 
[ 756.246760] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 756.246760] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 756.246760] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 756.246760] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 756.246760] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 756.246760] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 756.246760] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 756.246760] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.246760] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 756.246760] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.246760] env[61998]: ERROR nova.compute.manager raise self.value [ 756.246760] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 756.246760] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 756.246760] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.246760] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 756.247330] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.247330] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 756.247330] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0ff860cb-d237-47e4-bd6b-857bd5a53937, please check neutron logs for more information. 
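The PortBindingFailed above originates in Nova's Neutron API layer: after creating or updating a port, Nova inspects the port dict Neutron returns and fails fast if the binding did not succeed. Below is a minimal, self-contained sketch of that check (illustrative names, not Nova's exact code; the real check sits at nova/network/neutron.py:294 per the traceback), assuming the standard ML2 convention that Neutron sets binding:vif_type to 'binding_failed' when no mechanism driver could bind the port:

```python
# Sketch of the check behind "Binding failed for port ...": Nova looks at the
# 'binding:vif_type' field of the port dict returned by Neutron and raises
# when the ML2 plugin reported a failed binding.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron sets on failure


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron "
            "logs for more information.")


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise PortBindingFailed if Neutron reported a failed binding."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port['id'])


# The port from the traceback above would come back looking roughly like this:
port = {'id': '0ff860cb-d237-47e4-bd6b-857bd5a53937',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)
```

Because the check runs inside _update_ports_for_instance, the failure propagates out of allocate_for_instance and aborts the spawn before any VM is created, which is why the teardown later in this section finds nothing on the backend.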
[ 756.247330] env[61998]: ERROR nova.compute.manager [ 756.247330] env[61998]: Traceback (most recent call last): [ 756.247330] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 756.247330] env[61998]: listener.cb(fileno) [ 756.247330] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 756.247330] env[61998]: result = function(*args, **kwargs) [ 756.247330] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 756.247330] env[61998]: return func(*args, **kwargs) [ 756.247330] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 756.247330] env[61998]: raise e [ 756.247330] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 756.247330] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 756.247330] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 756.247330] env[61998]: created_port_ids = self._update_ports_for_instance( [ 756.247330] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 756.247330] env[61998]: with excutils.save_and_reraise_exception(): [ 756.247330] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.247330] env[61998]: self.force_reraise() [ 756.247330] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.247330] env[61998]: raise self.value [ 756.247330] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 756.247330] env[61998]: updated_port = self._update_port( [ 756.247330] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.247330] env[61998]: _ensure_no_port_binding_failure(port) [ 756.247330] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.247330] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 756.248353] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 0ff860cb-d237-47e4-bd6b-857bd5a53937, please check neutron logs for more information. [ 756.248353] env[61998]: Removing descriptor: 15 [ 756.248353] env[61998]: ERROR nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0ff860cb-d237-47e4-bd6b-857bd5a53937, please check neutron logs for more information. 
[ 756.248353] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Traceback (most recent call last): [ 756.248353] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 756.248353] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] yield resources [ 756.248353] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 756.248353] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] self.driver.spawn(context, instance, image_meta, [ 756.248353] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 756.248353] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] self._vmops.spawn(context, instance, image_meta, injected_files, [ 756.248353] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 756.248353] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] vm_ref = self.build_virtual_machine(instance, [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] vif_infos = vmwarevif.get_vif_info(self._session, [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] for vif in network_info: [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] return self._sync_wrapper(fn, *args, **kwargs) [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] self.wait() [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] self[:] = self._gt.wait() [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] return self._exit_event.wait() [ 756.248778] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 756.249244] env[61998]: ERROR 
nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] result = hub.switch() [ 756.249244] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 756.249244] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] return self.greenlet.switch() [ 756.249244] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 756.249244] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] result = function(*args, **kwargs) [ 756.249244] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 756.249244] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] return func(*args, **kwargs) [ 756.249244] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 756.249244] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] raise e [ 756.249244] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 756.249244] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] nwinfo = self.network_api.allocate_for_instance( [ 756.249244] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 756.249244] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] created_port_ids = self._update_ports_for_instance( [ 756.250858] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 756.250858] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] with excutils.save_and_reraise_exception(): [ 756.250858] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.250858] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] self.force_reraise() [ 756.250858] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.250858] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] raise self.value [ 756.250858] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 756.250858] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] updated_port = self._update_port( [ 756.250858] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.250858] 
env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] _ensure_no_port_binding_failure(port) [ 756.250858] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.250858] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] raise exception.PortBindingFailed(port_id=port['id']) [ 756.251367] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] nova.exception.PortBindingFailed: Binding failed for port 0ff860cb-d237-47e4-bd6b-857bd5a53937, please check neutron logs for more information. [ 756.251367] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] [ 756.251367] env[61998]: INFO nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Terminating instance [ 756.251367] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "refresh_cache-e632307a-ffe9-45a6-9224-8598aea5d269" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.371964] env[61998]: DEBUG nova.network.neutron [req-50587183-4ec4-4dc2-b09f-fbceb1e1853f req-d2814f06-77fd-4af4-b05a-8a8304326168 service nova] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.518818] env[61998]: DEBUG nova.network.neutron [req-50587183-4ec4-4dc2-b09f-fbceb1e1853f req-d2814f06-77fd-4af4-b05a-8a8304326168 service nova] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.600407] env[61998]: DEBUG nova.compute.utils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 756.602196] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 756.603075] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 756.656093] env[61998]: DEBUG nova.policy [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5f2c10ceb02461095c74c83d4a745d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e4e8f3e000f4c2383b3a39f24499577', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 756.896083] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39cc2c8-4ff8-435d-b372-096945c97c2a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.908007] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084808d3-8ba3-41e6-aea7-a3a046e42e12 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.943567] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c62338f-b005-4de8-a753-f7c34eb01701 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.951970] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed7783f6-b1a9-4623-99be-a0f6dae55a72 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.965957] env[61998]: DEBUG nova.compute.provider_tree [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.025437] env[61998]: DEBUG oslo_concurrency.lockutils [req-50587183-4ec4-4dc2-b09f-fbceb1e1853f req-d2814f06-77fd-4af4-b05a-8a8304326168 service nova] Releasing lock "refresh_cache-e632307a-ffe9-45a6-9224-8598aea5d269" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.025854] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquired lock "refresh_cache-e632307a-ffe9-45a6-9224-8598aea5d269" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.026046] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 
tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 757.106666] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 757.161249] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Successfully created port: 60237559-6ae1-4690-8aed-f076fb3cc4d0 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 757.470915] env[61998]: DEBUG nova.scheduler.client.report [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 757.552304] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 757.747218] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.871504] env[61998]: DEBUG nova.compute.manager [req-faf0cfbf-d321-44ae-8733-98613623c57d req-ed098871-4a94-4434-999b-bdf3e0897cdb service nova] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Received event network-vif-deleted-0ff860cb-d237-47e4-bd6b-857bd5a53937 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 757.975053] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.887s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.975758] env[61998]: ERROR nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ae530f13-5c15-41af-927d-17c2acf2a6fa, please check neutron logs for more information. [ 757.975758] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Traceback (most recent call last): [ 757.975758] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 757.975758] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] self.driver.spawn(context, instance, image_meta, [ 757.975758] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 757.975758] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] self._vmops.spawn(context, instance, image_meta, injected_files, [ 757.975758] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 757.975758] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] vm_ref = self.build_virtual_machine(instance, [ 757.975758] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 757.975758] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] vif_infos = vmwarevif.get_vif_info(self._session, [ 757.975758] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 757.976264] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] for vif in network_info: [ 757.976264] env[61998]: ERROR nova.compute.manager 
[instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 757.976264] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] return self._sync_wrapper(fn, *args, **kwargs) [ 757.976264] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 757.976264] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] self.wait() [ 757.976264] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 757.976264] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] self[:] = self._gt.wait() [ 757.976264] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 757.976264] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] return self._exit_event.wait() [ 757.976264] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 757.976264] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] result = hub.switch() [ 757.976264] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 757.976264] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] return self.greenlet.switch() [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] result = function(*args, **kwargs) [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] return func(*args, **kwargs) [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] raise e [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] nwinfo = self.network_api.allocate_for_instance( [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] created_port_ids = self._update_ports_for_instance( [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] 
File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] with excutils.save_and_reraise_exception(): [ 757.976776] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.977194] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] self.force_reraise() [ 757.977194] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.977194] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] raise self.value [ 757.977194] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 757.977194] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] updated_port = self._update_port( [ 757.977194] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.977194] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] _ensure_no_port_binding_failure(port) [ 757.977194] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.977194] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] raise exception.PortBindingFailed(port_id=port['id']) [ 757.977194] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] nova.exception.PortBindingFailed: Binding failed for port ae530f13-5c15-41af-927d-17c2acf2a6fa, please check neutron logs for more information. [ 757.977194] env[61998]: ERROR nova.compute.manager [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] [ 757.977580] env[61998]: DEBUG nova.compute.utils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Binding failed for port ae530f13-5c15-41af-927d-17c2acf2a6fa, please check neutron logs for more information. 
{{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 757.978447] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.440s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.980199] env[61998]: INFO nova.compute.claims [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.983490] env[61998]: DEBUG nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Build of instance bc1ef57d-457d-446a-8ad4-3bab6d331215 was re-scheduled: Binding failed for port ae530f13-5c15-41af-927d-17c2acf2a6fa, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 757.983972] env[61998]: DEBUG nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 757.984251] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "refresh_cache-bc1ef57d-457d-446a-8ad4-3bab6d331215" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.988019] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquired lock "refresh_cache-bc1ef57d-457d-446a-8ad4-3bab6d331215" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.988019] env[61998]: DEBUG nova.network.neutron [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 758.119278] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 758.147913] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 758.147913] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 758.148105] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 758.148192] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 758.148331] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 758.148471] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 758.148673] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 758.149011] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 758.149011] env[61998]: DEBUG nova.virt.hardware [None 
req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 758.149345] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 758.149679] env[61998]: DEBUG nova.virt.hardware [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 758.150861] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58adbfeb-fb0d-48eb-9ab5-f7b90c2bd552 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.159600] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a74f14-1aef-4d7f-8a54-6b7b267ea298 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.249081] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Releasing lock "refresh_cache-e632307a-ffe9-45a6-9224-8598aea5d269" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.249601] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 758.249792] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 758.250104] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b546f91f-7e1e-48f8-a4f5-cf53075ec30b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.259738] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d652d30f-e154-4f26-a1d9-8a41e0f59e20 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.283409] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e632307a-ffe9-45a6-9224-8598aea5d269 could not be found. 
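The InstanceNotFound WARNING above shows the teardown path handling exactly that situation: the spawn failed before a VM existed, so the backend lookup misses and the driver treats "not found" as already-destroyed rather than erroring, letting network deallocation and claim cleanup proceed (hence "Took 0.03 seconds to destroy"). A hedged sketch of that idempotent-teardown pattern, with illustrative names rather than the vmwareapi driver's actual code:

```python
# Sketch of idempotent teardown: a missing VM during destroy is treated as
# success so the remaining cleanup steps can still run.
import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    pass


def _find_vm_by_uuid(instance_uuid):
    # Stand-in for the SearchIndex.FindAllByUuid call in the log; here the
    # lookup always misses, as it did for e632307a-... above.
    raise InstanceNotFound(instance_uuid)


def destroy(instance_uuid):
    try:
        vm_ref = _find_vm_by_uuid(instance_uuid)
    except InstanceNotFound:
        LOG.warning('Instance does not exist on backend: %s', instance_uuid)
        return  # nothing to tear down; succeed so the rest of cleanup runs
    # ... power off and unregister vm_ref here ...


destroy('e632307a-ffe9-45a6-9224-8598aea5d269')
```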
[ 758.283653] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 758.283852] env[61998]: INFO nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Took 0.03 seconds to destroy the instance on the hypervisor. [ 758.284110] env[61998]: DEBUG oslo.service.loopingcall [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 758.284342] env[61998]: DEBUG nova.compute.manager [-] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 758.284439] env[61998]: DEBUG nova.network.neutron [-] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 758.299432] env[61998]: DEBUG nova.network.neutron [-] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.336055] env[61998]: ERROR nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 60237559-6ae1-4690-8aed-f076fb3cc4d0, please check neutron logs for more information.
[ 758.336055] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 758.336055] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 758.336055] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 758.336055] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 758.336055] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 758.336055] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 758.336055] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 758.336055] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.336055] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 758.336055] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.336055] env[61998]: ERROR nova.compute.manager raise self.value [ 758.336055] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 758.336055] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 758.336055] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.336055] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 758.336578] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.336578] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 758.336578] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 60237559-6ae1-4690-8aed-f076fb3cc4d0, please check neutron logs for more information. 
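Every traceback in this section passes through oslo.utils' save_and_reraise_exception context manager (the neutron.py:1414 and excutils.py frames). A small self-contained demo of that real API, assuming oslo.utils is installed; the cleanup helper and the RuntimeError are stand-ins, not Nova code:

```python
# Demo of oslo_utils.excutils.save_and_reraise_exception: entered inside an
# except block, it captures the in-flight exception, runs the cleanup code in
# the with-body, and re-raises the original on exit via force_reraise() --
# the "self.force_reraise()" / "raise self.value" frames seen above.
from oslo_utils import excutils


def _cleanup_created_ports(port_ids):
    # Stand-in cleanup; Nova's real code rolls back the ports it created.
    print('cleaning up ports:', port_ids)


def update_ports_for_instance():
    created = ['60237559-6ae1-4690-8aed-f076fb3cc4d0']
    try:
        raise RuntimeError('binding failed')  # stand-in for PortBindingFailed
    except RuntimeError:
        with excutils.save_and_reraise_exception():
            _cleanup_created_ports(created)


try:
    update_ports_for_instance()
except RuntimeError as exc:
    print('original exception propagated:', exc)
```

The point of the pattern is that cleanup runs while the original exception is preserved, so callers such as _allocate_network_async still see the PortBindingFailed rather than whatever the cleanup might raise or swallow.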
[ 758.336578] env[61998]: ERROR nova.compute.manager [ 758.336578] env[61998]: Traceback (most recent call last): [ 758.336578] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 758.336578] env[61998]: listener.cb(fileno) [ 758.336578] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 758.336578] env[61998]: result = function(*args, **kwargs) [ 758.336578] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 758.336578] env[61998]: return func(*args, **kwargs) [ 758.336578] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 758.336578] env[61998]: raise e [ 758.336578] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 758.336578] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 758.336578] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 758.336578] env[61998]: created_port_ids = self._update_ports_for_instance( [ 758.336578] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 758.336578] env[61998]: with excutils.save_and_reraise_exception(): [ 758.336578] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.336578] env[61998]: self.force_reraise() [ 758.336578] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.336578] env[61998]: raise self.value [ 758.336578] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 758.336578] env[61998]: updated_port = self._update_port( [ 758.336578] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.336578] env[61998]: _ensure_no_port_binding_failure(port) [ 758.336578] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.336578] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 758.337443] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 60237559-6ae1-4690-8aed-f076fb3cc4d0, please check neutron logs for more information. [ 758.337443] env[61998]: Removing descriptor: 15 [ 758.337443] env[61998]: ERROR nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 60237559-6ae1-4690-8aed-f076fb3cc4d0, please check neutron logs for more information. 
[ 758.337443] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Traceback (most recent call last): [ 758.337443] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 758.337443] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] yield resources [ 758.337443] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 758.337443] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] self.driver.spawn(context, instance, image_meta, [ 758.337443] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 758.337443] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] self._vmops.spawn(context, instance, image_meta, injected_files, [ 758.337443] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 758.337443] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] vm_ref = self.build_virtual_machine(instance, [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] vif_infos = vmwarevif.get_vif_info(self._session, [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] for vif in network_info: [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] return self._sync_wrapper(fn, *args, **kwargs) [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] self.wait() [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] self[:] = self._gt.wait() [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] return self._exit_event.wait() [ 758.337797] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 758.338217] env[61998]: ERROR 
nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] result = hub.switch() [ 758.338217] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 758.338217] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] return self.greenlet.switch() [ 758.338217] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 758.338217] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] result = function(*args, **kwargs) [ 758.338217] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 758.338217] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] return func(*args, **kwargs) [ 758.338217] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 758.338217] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] raise e [ 758.338217] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 758.338217] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] nwinfo = self.network_api.allocate_for_instance( [ 758.338217] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 758.338217] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] created_port_ids = self._update_ports_for_instance( [ 758.338908] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 758.338908] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] with excutils.save_and_reraise_exception(): [ 758.338908] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.338908] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] self.force_reraise() [ 758.338908] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.338908] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] raise self.value [ 758.338908] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 758.338908] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] updated_port = self._update_port( [ 758.338908] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.338908] 
env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] _ensure_no_port_binding_failure(port) [ 758.338908] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.338908] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] raise exception.PortBindingFailed(port_id=port['id']) [ 758.339307] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] nova.exception.PortBindingFailed: Binding failed for port 60237559-6ae1-4690-8aed-f076fb3cc4d0, please check neutron logs for more information. [ 758.339307] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] [ 758.339307] env[61998]: INFO nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Terminating instance [ 758.339307] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "refresh_cache-b9ec575c-034b-46bc-afbd-7a8a07a8e005" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.339439] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquired lock "refresh_cache-b9ec575c-034b-46bc-afbd-7a8a07a8e005" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.339535] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 758.510487] env[61998]: DEBUG nova.network.neutron [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.631900] env[61998]: DEBUG nova.network.neutron [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.802260] env[61998]: DEBUG nova.network.neutron [-] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.856353] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.960874] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.134327] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Releasing lock "refresh_cache-bc1ef57d-457d-446a-8ad4-3bab6d331215" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.134668] env[61998]: DEBUG nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 759.135068] env[61998]: DEBUG nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 759.135332] env[61998]: DEBUG nova.network.neutron [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 759.157725] env[61998]: DEBUG nova.network.neutron [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.246735] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f2d534-d04b-45d8-a5ab-1b7a575ba588 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.254809] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34e2fe8-e4cd-4026-867a-22e88c60e456 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.291017] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a325e236-e4eb-4f65-9080-f51b4a0be1ea {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.298191] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc85fda0-b6a8-4da5-846b-bdb5a724d812 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.312152] env[61998]: INFO nova.compute.manager [-] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Took 1.03 seconds to deallocate network for instance. [ 759.312844] env[61998]: DEBUG nova.compute.provider_tree [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.320022] env[61998]: DEBUG nova.compute.claims [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 759.320022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.465274] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Releasing lock "refresh_cache-b9ec575c-034b-46bc-afbd-7a8a07a8e005" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.465274] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 759.465274] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 759.465274] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e9b2ed33-2f6f-4c7a-b364-2497447de810 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.474605] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e695872a-d68c-4570-a167-f85d666d59bb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.497143] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b9ec575c-034b-46bc-afbd-7a8a07a8e005 could not be found. [ 759.497367] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 759.497544] env[61998]: INFO nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Took 0.03 seconds to destroy the instance on the hypervisor. [ 759.497778] env[61998]: DEBUG oslo.service.loopingcall [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.497995] env[61998]: DEBUG nova.compute.manager [-] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 759.498097] env[61998]: DEBUG nova.network.neutron [-] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 759.514073] env[61998]: DEBUG nova.network.neutron [-] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.662369] env[61998]: DEBUG nova.network.neutron [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.818534] env[61998]: DEBUG nova.scheduler.client.report [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 759.910699] env[61998]: DEBUG nova.compute.manager [req-5258a0c8-8480-46eb-9191-837b17efce24 req-b9a83b2d-ac90-477c-a89d-c66bb21726b2 service nova] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Received event network-changed-60237559-6ae1-4690-8aed-f076fb3cc4d0 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 759.910909] env[61998]: DEBUG nova.compute.manager [req-5258a0c8-8480-46eb-9191-837b17efce24 req-b9a83b2d-ac90-477c-a89d-c66bb21726b2 service nova] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Refreshing instance network info cache due to event network-changed-60237559-6ae1-4690-8aed-f076fb3cc4d0. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 759.911423] env[61998]: DEBUG oslo_concurrency.lockutils [req-5258a0c8-8480-46eb-9191-837b17efce24 req-b9a83b2d-ac90-477c-a89d-c66bb21726b2 service nova] Acquiring lock "refresh_cache-b9ec575c-034b-46bc-afbd-7a8a07a8e005" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.911619] env[61998]: DEBUG oslo_concurrency.lockutils [req-5258a0c8-8480-46eb-9191-837b17efce24 req-b9a83b2d-ac90-477c-a89d-c66bb21726b2 service nova] Acquired lock "refresh_cache-b9ec575c-034b-46bc-afbd-7a8a07a8e005" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.911786] env[61998]: DEBUG nova.network.neutron [req-5258a0c8-8480-46eb-9191-837b17efce24 req-b9a83b2d-ac90-477c-a89d-c66bb21726b2 service nova] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Refreshing network info cache for port 60237559-6ae1-4690-8aed-f076fb3cc4d0 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 760.016283] env[61998]: DEBUG nova.network.neutron [-] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.165499] env[61998]: INFO nova.compute.manager [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: bc1ef57d-457d-446a-8ad4-3bab6d331215] Took 1.03 seconds to deallocate network for instance. 
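The provider inventory repeated in these records is what the scheduler prices instances against. As a minimal sketch, assuming the standard Placement capacity formula capacity = (total - reserved) * allocation_ratio, the logged numbers work out as below; max_unit is the per-allocation cap, which is why a single instance here can never request more than 16 vCPUs or 175 GB of disk:

    # Recomputing schedulable capacity from the inventory dict logged above.
    # Illustrative only; the formula is the standard Placement one:
    #   capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 175},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:.0f}, per-allocation cap={inv['max_unit']}")

With the 4.0 allocation ratio this node advertises 192 schedulable VCPUs on 48 physical ones, while memory and disk are not oversubscribed.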
[ 760.324019] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.345s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.324019] env[61998]: DEBUG nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 760.326027] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.873s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.327443] env[61998]: INFO nova.compute.claims [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 760.433162] env[61998]: DEBUG nova.network.neutron [req-5258a0c8-8480-46eb-9191-837b17efce24 req-b9a83b2d-ac90-477c-a89d-c66bb21726b2 service nova] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.519625] env[61998]: INFO nova.compute.manager [-] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Took 1.02 seconds to deallocate network for instance. 
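The waited/held timings in the surrounding lockutils records are emitted by oslo.concurrency's lock wrapper (the "inner" frames at lockutils.py:402/407/421). A minimal sketch of the pattern, with a stand-in lock name and body rather than Nova's resource-tracker code:

    from oslo_concurrency import lockutils

    # The decorator serializes callers on a named in-process lock and, at
    # debug level, logs how long each caller waited to acquire it and how
    # long it was held, matching the "acquired/released" records above.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # work done under the lock goes here

    claim_resources()

Long "waited" values, like the 14.873s seen above for the instance_claim, mean many concurrent builds were queued behind the same compute_resources lock.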
[ 760.524516] env[61998]: DEBUG nova.compute.claims [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 760.526240] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.527295] env[61998]: DEBUG nova.network.neutron [req-5258a0c8-8480-46eb-9191-837b17efce24 req-b9a83b2d-ac90-477c-a89d-c66bb21726b2 service nova] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.832670] env[61998]: DEBUG nova.compute.utils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 760.841028] env[61998]: DEBUG nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 760.841028] env[61998]: DEBUG nova.network.neutron [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 760.897626] env[61998]: DEBUG nova.policy [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '479ce49eee9849d48f7683f8232b69c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6af19c6aedd463d8a1fa6a6b00ff852', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 761.029911] env[61998]: DEBUG oslo_concurrency.lockutils [req-5258a0c8-8480-46eb-9191-837b17efce24 req-b9a83b2d-ac90-477c-a89d-c66bb21726b2 service nova] Releasing lock "refresh_cache-b9ec575c-034b-46bc-afbd-7a8a07a8e005" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.030217] env[61998]: DEBUG nova.compute.manager [req-5258a0c8-8480-46eb-9191-837b17efce24 req-b9a83b2d-ac90-477c-a89d-c66bb21726b2 service nova] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Received event network-vif-deleted-60237559-6ae1-4690-8aed-f076fb3cc4d0 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} 
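All of the PortBindingFailed tracebacks in this section share one shape: a check on the Neutron port raises, save_and_reraise_exception() lets cleanup run before the error is re-raised, and the exception only surfaces when the spawn path waits on the async allocation greenthread. A reduced sketch of that path, with PortBindingFailed and the port dict as simplified stand-ins for Nova's real definitions:

    import eventlet
    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

    def _ensure_no_port_binding_failure(port):
        # Neutron reports 'binding_failed' as the vif_type when no
        # mechanism driver could bind the port on the target host.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(
                f"Binding failed for port {port['id']}, "
                "please check neutron logs for more information.")

    def allocate_for_instance():
        try:
            _ensure_no_port_binding_failure(
                {'id': '60237559-6ae1-4690-8aed-f076fb3cc4d0',
                 'binding:vif_type': 'binding_failed'})
        except Exception:
            with excutils.save_and_reraise_exception():
                print("rolling back created ports")  # cleanup, then re-raise

    # Allocation runs in a greenthread; the failure is delivered only when
    # the spawn path iterates network_info, which wait()s on the thread.
    gt = eventlet.spawn(allocate_for_instance)
    try:
        gt.wait()
    except PortBindingFailed as e:
        print(f"spawn sees: {e}")

This is why each instance gets as far as "Start spawning the instance on the hypervisor" before failing: _allocate_network_async runs concurrently, and its exception is only raised into the spawn path once get_vif_info consumes the network info.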
[ 761.194452] env[61998]: DEBUG nova.network.neutron [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Successfully created port: ddb3860b-63de-40cc-aaf6-bc715ccafb4e {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 761.204028] env[61998]: INFO nova.scheduler.client.report [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Deleted allocations for instance bc1ef57d-457d-446a-8ad4-3bab6d331215 [ 761.343141] env[61998]: DEBUG nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 761.651046] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc94fc0-dca8-4419-97a1-fb5b2c2f0157 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.658748] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c64da60-0410-402a-ab00-8f5e83f05fbb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.690205] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49496f3-6cd2-487d-9ce9-4c96cb53bc15 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.698162] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b96772-1db7-4184-b88f-ce7364b1421d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.711404] env[61998]: DEBUG nova.compute.provider_tree [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.715552] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c296236-c4ea-4597-904a-3e8828f27fcb tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "bc1ef57d-457d-446a-8ad4-3bab6d331215" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 149.061s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.214843] env[61998]: DEBUG nova.scheduler.client.report [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 762.220043] env[61998]: DEBUG nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 762.239287] env[61998]: DEBUG nova.compute.manager [req-2cd0e603-f547-429b-af4b-c41576d05535 req-b12eb45d-0996-4385-a816-e3fffab16e64 service nova] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Received event network-changed-ddb3860b-63de-40cc-aaf6-bc715ccafb4e {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 762.239556] env[61998]: DEBUG nova.compute.manager [req-2cd0e603-f547-429b-af4b-c41576d05535 req-b12eb45d-0996-4385-a816-e3fffab16e64 service nova] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Refreshing instance network info cache due to event network-changed-ddb3860b-63de-40cc-aaf6-bc715ccafb4e. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 762.239737] env[61998]: DEBUG oslo_concurrency.lockutils [req-2cd0e603-f547-429b-af4b-c41576d05535 req-b12eb45d-0996-4385-a816-e3fffab16e64 service nova] Acquiring lock "refresh_cache-e49e77d6-8d5e-4d89-b129-ac34cd1969c1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.239830] env[61998]: DEBUG oslo_concurrency.lockutils [req-2cd0e603-f547-429b-af4b-c41576d05535 req-b12eb45d-0996-4385-a816-e3fffab16e64 service nova] Acquired lock "refresh_cache-e49e77d6-8d5e-4d89-b129-ac34cd1969c1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.239986] env[61998]: DEBUG nova.network.neutron [req-2cd0e603-f547-429b-af4b-c41576d05535 req-b12eb45d-0996-4385-a816-e3fffab16e64 service nova] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Refreshing network info cache for port ddb3860b-63de-40cc-aaf6-bc715ccafb4e {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 762.358029] env[61998]: DEBUG nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 762.389113] env[61998]: DEBUG nova.virt.hardware [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 762.389369] env[61998]: DEBUG nova.virt.hardware [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 762.389531] env[61998]: DEBUG nova.virt.hardware [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 762.389708] env[61998]: DEBUG nova.virt.hardware [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 762.389851] env[61998]: DEBUG nova.virt.hardware [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 762.389995] env[61998]: DEBUG nova.virt.hardware [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 762.390430] env[61998]: DEBUG nova.virt.hardware [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 762.390658] env[61998]: DEBUG nova.virt.hardware [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 762.390888] env[61998]: DEBUG nova.virt.hardware [None 
req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 762.394020] env[61998]: DEBUG nova.virt.hardware [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 762.394020] env[61998]: DEBUG nova.virt.hardware [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 762.394020] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf9c024-0903-4090-8a2e-93a125bab851 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.402175] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32e4734-639e-4566-a273-6641fcf21c93 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.408233] env[61998]: ERROR nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ddb3860b-63de-40cc-aaf6-bc715ccafb4e, please check neutron logs for more information. 
[ 762.408233] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 762.408233] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 762.408233] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 762.408233] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 762.408233] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 762.408233] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 762.408233] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 762.408233] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 762.408233] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 762.408233] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 762.408233] env[61998]: ERROR nova.compute.manager raise self.value [ 762.408233] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 762.408233] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 762.408233] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 762.408233] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 762.408791] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 762.408791] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 762.408791] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ddb3860b-63de-40cc-aaf6-bc715ccafb4e, please check neutron logs for more information. 
[ 762.408791] env[61998]: ERROR nova.compute.manager [ 762.408791] env[61998]: Traceback (most recent call last): [ 762.408791] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 762.408791] env[61998]: listener.cb(fileno) [ 762.408791] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 762.408791] env[61998]: result = function(*args, **kwargs) [ 762.408791] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 762.408791] env[61998]: return func(*args, **kwargs) [ 762.408791] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 762.408791] env[61998]: raise e [ 762.408791] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 762.408791] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 762.408791] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 762.408791] env[61998]: created_port_ids = self._update_ports_for_instance( [ 762.408791] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 762.408791] env[61998]: with excutils.save_and_reraise_exception(): [ 762.408791] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 762.408791] env[61998]: self.force_reraise() [ 762.408791] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 762.408791] env[61998]: raise self.value [ 762.408791] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 762.408791] env[61998]: updated_port = self._update_port( [ 762.408791] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 762.408791] env[61998]: _ensure_no_port_binding_failure(port) [ 762.408791] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 762.408791] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 762.409802] env[61998]: nova.exception.PortBindingFailed: Binding failed for port ddb3860b-63de-40cc-aaf6-bc715ccafb4e, please check neutron logs for more information. [ 762.409802] env[61998]: Removing descriptor: 15 [ 762.419162] env[61998]: ERROR nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ddb3860b-63de-40cc-aaf6-bc715ccafb4e, please check neutron logs for more information. 
[ 762.419162] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Traceback (most recent call last): [ 762.419162] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 762.419162] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] yield resources [ 762.419162] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 762.419162] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] self.driver.spawn(context, instance, image_meta, [ 762.419162] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 762.419162] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 762.419162] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 762.419162] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] vm_ref = self.build_virtual_machine(instance, [ 762.419162] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 762.419643] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] vif_infos = vmwarevif.get_vif_info(self._session, [ 762.419643] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 762.419643] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] for vif in network_info: [ 762.419643] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 762.419643] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] return self._sync_wrapper(fn, *args, **kwargs) [ 762.419643] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 762.419643] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] self.wait() [ 762.419643] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 762.419643] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] self[:] = self._gt.wait() [ 762.419643] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 762.419643] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] return self._exit_event.wait() [ 762.419643] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 762.419643] env[61998]: ERROR 
nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] current.throw(*self._exc) [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] result = function(*args, **kwargs) [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] return func(*args, **kwargs) [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] raise e [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] nwinfo = self.network_api.allocate_for_instance( [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] created_port_ids = self._update_ports_for_instance( [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] with excutils.save_and_reraise_exception(): [ 762.420123] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 762.420582] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] self.force_reraise() [ 762.420582] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 762.420582] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] raise self.value [ 762.420582] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 762.420582] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] updated_port = self._update_port( [ 762.420582] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 762.420582] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] _ensure_no_port_binding_failure(port) [ 762.420582] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
762.420582] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] raise exception.PortBindingFailed(port_id=port['id']) [ 762.420582] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] nova.exception.PortBindingFailed: Binding failed for port ddb3860b-63de-40cc-aaf6-bc715ccafb4e, please check neutron logs for more information. [ 762.420582] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] [ 762.420582] env[61998]: INFO nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Terminating instance [ 762.422112] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Acquiring lock "refresh_cache-e49e77d6-8d5e-4d89-b129-ac34cd1969c1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.632799] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.632799] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.724281] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.724863] env[61998]: DEBUG nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 762.729471] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.339s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.733501] env[61998]: INFO nova.compute.claims [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 762.755343] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.774213] env[61998]: DEBUG nova.network.neutron [req-2cd0e603-f547-429b-af4b-c41576d05535 req-b12eb45d-0996-4385-a816-e3fffab16e64 service nova] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.918850] env[61998]: DEBUG nova.network.neutron [req-2cd0e603-f547-429b-af4b-c41576d05535 req-b12eb45d-0996-4385-a816-e3fffab16e64 service nova] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.231955] env[61998]: DEBUG nova.compute.utils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 763.232823] env[61998]: DEBUG nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 763.233404] env[61998]: DEBUG nova.network.neutron [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 763.280913] env[61998]: DEBUG nova.policy [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6423d0a86724e239792039797cf44f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f9d84ad259f469881b6d87317f7c26c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 763.422096] env[61998]: DEBUG oslo_concurrency.lockutils [req-2cd0e603-f547-429b-af4b-c41576d05535 req-b12eb45d-0996-4385-a816-e3fffab16e64 service nova] Releasing lock "refresh_cache-e49e77d6-8d5e-4d89-b129-ac34cd1969c1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.422536] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Acquired lock "refresh_cache-e49e77d6-8d5e-4d89-b129-ac34cd1969c1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.422735] env[61998]: DEBUG nova.network.neutron [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.661462] env[61998]: DEBUG nova.network.neutron [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Successfully created port: 83835327-ef7b-48ef-b628-4e777b266394 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.740672] env[61998]: DEBUG nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 763.952088] env[61998]: DEBUG nova.network.neutron [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.973990] env[61998]: DEBUG nova.network.neutron [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Successfully created port: c90065aa-bc22-4c29-93bf-a54ea4749268 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 764.053820] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e031ef-099f-4201-a1a6-f325df2c2b0f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.059057] env[61998]: DEBUG nova.network.neutron [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.066147] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0da4fb-4474-4652-adf6-fc2703a5f708 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.101630] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4c9840-2a75-4b40-8636-47459976c3fe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.111113] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b0e4cc-47d6-4166-9d5d-6c214bb5cf5b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.132844] env[61998]: DEBUG nova.compute.provider_tree [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.353302] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "dadd9985-bca3-4207-927f-9490e0ae3f10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.353674] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.357017] env[61998]: DEBUG nova.compute.manager [req-eca50344-dd9e-4ce9-9155-32dff84f296b req-bde8f95d-5559-4534-b897-024c44b14c66 service nova] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Received event network-vif-deleted-ddb3860b-63de-40cc-aaf6-bc715ccafb4e 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 764.567633] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Releasing lock "refresh_cache-e49e77d6-8d5e-4d89-b129-ac34cd1969c1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.567633] env[61998]: DEBUG nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 764.567633] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 764.567633] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e936aba-368a-46fc-8515-bf809c6ba8a9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.577790] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8351171b-132c-4b4d-8aad-dfef7e370db3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.599385] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e49e77d6-8d5e-4d89-b129-ac34cd1969c1 could not be found. [ 764.599638] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 764.599832] env[61998]: INFO nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Took 0.03 seconds to destroy the instance on the hypervisor. [ 764.600096] env[61998]: DEBUG oslo.service.loopingcall [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 764.600315] env[61998]: DEBUG nova.compute.manager [-] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 764.600408] env[61998]: DEBUG nova.network.neutron [-] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 764.614457] env[61998]: DEBUG nova.network.neutron [-] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.632417] env[61998]: DEBUG nova.scheduler.client.report [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 764.747184] env[61998]: DEBUG nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 764.775954] env[61998]: DEBUG nova.virt.hardware [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 764.776213] env[61998]: DEBUG nova.virt.hardware [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 764.776365] env[61998]: DEBUG nova.virt.hardware [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.776539] env[61998]: DEBUG nova.virt.hardware [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 764.776680] env[61998]: DEBUG nova.virt.hardware [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.776829] env[61998]: DEBUG nova.virt.hardware [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 764.777101] env[61998]: DEBUG nova.virt.hardware [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 764.777262] env[61998]: DEBUG nova.virt.hardware [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 764.777463] env[61998]: DEBUG nova.virt.hardware [None req-6304b730-45c1-4be8-abd9-b72ba63b9987
tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 764.777594] env[61998]: DEBUG nova.virt.hardware [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 764.777772] env[61998]: DEBUG nova.virt.hardware [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 764.779019] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b98e1b-a0a0-4c32-85c8-79676d53848f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.787049] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d093442-24ad-4241-8498-7c2e17177947 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.069619] env[61998]: ERROR nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 83835327-ef7b-48ef-b628-4e777b266394, please check neutron logs for more information. 
[ 765.069619] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 765.069619] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 765.069619] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 765.069619] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.069619] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 765.069619] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.069619] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 765.069619] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.069619] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 765.069619] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.069619] env[61998]: ERROR nova.compute.manager raise self.value [ 765.069619] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.069619] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 765.069619] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.069619] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 765.071087] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.071087] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 765.071087] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 83835327-ef7b-48ef-b628-4e777b266394, please check neutron logs for more information. 
[ 765.071087] env[61998]: ERROR nova.compute.manager [ 765.071087] env[61998]: Traceback (most recent call last): [ 765.071087] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 765.071087] env[61998]: listener.cb(fileno) [ 765.071087] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.071087] env[61998]: result = function(*args, **kwargs) [ 765.071087] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 765.071087] env[61998]: return func(*args, **kwargs) [ 765.071087] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 765.071087] env[61998]: raise e [ 765.071087] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 765.071087] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 765.071087] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.071087] env[61998]: created_port_ids = self._update_ports_for_instance( [ 765.071087] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.071087] env[61998]: with excutils.save_and_reraise_exception(): [ 765.071087] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.071087] env[61998]: self.force_reraise() [ 765.071087] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.071087] env[61998]: raise self.value [ 765.071087] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.071087] env[61998]: updated_port = self._update_port( [ 765.071087] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.071087] env[61998]: _ensure_no_port_binding_failure(port) [ 765.071087] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.071087] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 765.071731] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 83835327-ef7b-48ef-b628-4e777b266394, please check neutron logs for more information. [ 765.071731] env[61998]: Removing descriptor: 15 [ 765.071731] env[61998]: ERROR nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 83835327-ef7b-48ef-b628-4e777b266394, please check neutron logs for more information. 
[ 765.071731] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Traceback (most recent call last): [ 765.071731] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 765.071731] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] yield resources [ 765.071731] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 765.071731] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] self.driver.spawn(context, instance, image_meta, [ 765.071731] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 765.071731] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] self._vmops.spawn(context, instance, image_meta, injected_files, [ 765.071731] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 765.071731] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] vm_ref = self.build_virtual_machine(instance, [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] vif_infos = vmwarevif.get_vif_info(self._session, [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] for vif in network_info: [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] return self._sync_wrapper(fn, *args, **kwargs) [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] self.wait() [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] self[:] = self._gt.wait() [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] return self._exit_event.wait() [ 765.071952] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 765.072210] env[61998]: ERROR 
nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] result = hub.switch() [ 765.072210] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 765.072210] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] return self.greenlet.switch() [ 765.072210] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.072210] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] result = function(*args, **kwargs) [ 765.072210] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 765.072210] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] return func(*args, **kwargs) [ 765.072210] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 765.072210] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] raise e [ 765.072210] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 765.072210] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] nwinfo = self.network_api.allocate_for_instance( [ 765.072210] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.072210] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] created_port_ids = self._update_ports_for_instance( [ 765.072468] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.072468] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] with excutils.save_and_reraise_exception(): [ 765.072468] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.072468] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] self.force_reraise() [ 765.072468] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.072468] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] raise self.value [ 765.072468] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.072468] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] updated_port = self._update_port( [ 765.072468] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.072468] 
env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] _ensure_no_port_binding_failure(port) [ 765.072468] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.072468] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] raise exception.PortBindingFailed(port_id=port['id']) [ 765.072712] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] nova.exception.PortBindingFailed: Binding failed for port 83835327-ef7b-48ef-b628-4e777b266394, please check neutron logs for more information. [ 765.072712] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] [ 765.072712] env[61998]: INFO nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Terminating instance [ 765.073064] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquiring lock "refresh_cache-3f2ed06f-27d0-4a73-a678-430db5334147" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.073226] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquired lock "refresh_cache-3f2ed06f-27d0-4a73-a678-430db5334147" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.073388] env[61998]: DEBUG nova.network.neutron [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 765.116647] env[61998]: DEBUG nova.network.neutron [-] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.136943] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.137485] env[61998]: DEBUG nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 765.140315] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.144s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.603932] env[61998]: DEBUG nova.network.neutron [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.619261] env[61998]: INFO nova.compute.manager [-] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Took 1.02 seconds to deallocate network for instance. [ 765.621538] env[61998]: DEBUG nova.compute.claims [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 765.621770] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.644287] env[61998]: DEBUG nova.compute.utils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 765.646233] env[61998]: DEBUG nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 765.646413] env[61998]: DEBUG nova.network.neutron [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 765.688568] env[61998]: DEBUG nova.policy [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '744da696f7c64f62ae04195aa737fab4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c75c9b7c8d6b441d80fe512c37c88679', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 765.695030] env[61998]: DEBUG nova.network.neutron [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.922947] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25606ad4-7946-4f58-91f2-ff479c245b6d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.930843] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68da48f-2f78-474a-a932-079b1ecc5827 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.681889] env[61998]: DEBUG nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 766.684534] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Releasing lock "refresh_cache-3f2ed06f-27d0-4a73-a678-430db5334147" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.684932] env[61998]: DEBUG nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 766.685135] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 766.687169] env[61998]: DEBUG nova.network.neutron [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Successfully created port: 65c428f0-8266-4930-ba7c-15bf1d6c7bd8 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 766.689060] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59985bb3-9ca8-4f3c-89a4-caf6c6ee5f5b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.691556] env[61998]: DEBUG nova.compute.manager [req-ac8d15c0-e177-4a47-8f44-d013da15d9cc req-49f17903-ef95-4f03-a155-a8d611f897c9 service nova] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Received event network-changed-83835327-ef7b-48ef-b628-4e777b266394 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 766.691749] env[61998]: DEBUG nova.compute.manager [req-ac8d15c0-e177-4a47-8f44-d013da15d9cc req-49f17903-ef95-4f03-a155-a8d611f897c9 service nova] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Refreshing instance network info cache due to event network-changed-83835327-ef7b-48ef-b628-4e777b266394. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 766.691957] env[61998]: DEBUG oslo_concurrency.lockutils [req-ac8d15c0-e177-4a47-8f44-d013da15d9cc req-49f17903-ef95-4f03-a155-a8d611f897c9 service nova] Acquiring lock "refresh_cache-3f2ed06f-27d0-4a73-a678-430db5334147" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.692111] env[61998]: DEBUG oslo_concurrency.lockutils [req-ac8d15c0-e177-4a47-8f44-d013da15d9cc req-49f17903-ef95-4f03-a155-a8d611f897c9 service nova] Acquired lock "refresh_cache-3f2ed06f-27d0-4a73-a678-430db5334147" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.692265] env[61998]: DEBUG nova.network.neutron [req-ac8d15c0-e177-4a47-8f44-d013da15d9cc req-49f17903-ef95-4f03-a155-a8d611f897c9 service nova] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Refreshing network info cache for port 83835327-ef7b-48ef-b628-4e777b266394 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 766.697096] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342731d4-e134-41a0-96ea-bf2c51547218 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.703040] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e2d343-9a7c-4e37-8a1c-596978330de1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.709885] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9b54d2-9917-414d-aa59-6413e14329fe {{(pid=61998) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.730516] env[61998]: DEBUG nova.compute.provider_tree [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.735451] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3f2ed06f-27d0-4a73-a678-430db5334147 could not be found. [ 766.735651] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 766.735825] env[61998]: INFO nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Took 0.05 seconds to destroy the instance on the hypervisor. [ 766.736068] env[61998]: DEBUG oslo.service.loopingcall [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 766.736469] env[61998]: DEBUG nova.compute.manager [-] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 766.736566] env[61998]: DEBUG nova.network.neutron [-] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 766.918070] env[61998]: DEBUG nova.network.neutron [-] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.215026] env[61998]: DEBUG nova.network.neutron [req-ac8d15c0-e177-4a47-8f44-d013da15d9cc req-49f17903-ef95-4f03-a155-a8d611f897c9 service nova] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Instance cache missing network info.
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.238187] env[61998]: DEBUG nova.scheduler.client.report [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 767.321223] env[61998]: DEBUG nova.network.neutron [req-ac8d15c0-e177-4a47-8f44-d013da15d9cc req-49f17903-ef95-4f03-a155-a8d611f897c9 service nova] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.463841] env[61998]: DEBUG nova.compute.manager [req-10d91ce9-3fd6-49c0-b1d0-62e51fa5704a req-d9c0e28b-b474-4202-84be-975058efb5b9 service nova] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Received event network-changed-65c428f0-8266-4930-ba7c-15bf1d6c7bd8 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 767.464508] env[61998]: DEBUG nova.compute.manager [req-10d91ce9-3fd6-49c0-b1d0-62e51fa5704a req-d9c0e28b-b474-4202-84be-975058efb5b9 service nova] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Refreshing instance network info cache due to event network-changed-65c428f0-8266-4930-ba7c-15bf1d6c7bd8. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 767.464948] env[61998]: DEBUG oslo_concurrency.lockutils [req-10d91ce9-3fd6-49c0-b1d0-62e51fa5704a req-d9c0e28b-b474-4202-84be-975058efb5b9 service nova] Acquiring lock "refresh_cache-f5c91aad-0f8a-4ad3-8566-7f36ff983575" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.465252] env[61998]: DEBUG oslo_concurrency.lockutils [req-10d91ce9-3fd6-49c0-b1d0-62e51fa5704a req-d9c0e28b-b474-4202-84be-975058efb5b9 service nova] Acquired lock "refresh_cache-f5c91aad-0f8a-4ad3-8566-7f36ff983575" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.465568] env[61998]: DEBUG nova.network.neutron [req-10d91ce9-3fd6-49c0-b1d0-62e51fa5704a req-d9c0e28b-b474-4202-84be-975058efb5b9 service nova] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Refreshing network info cache for port 65c428f0-8266-4930-ba7c-15bf1d6c7bd8 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 767.606388] env[61998]: ERROR nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 65c428f0-8266-4930-ba7c-15bf1d6c7bd8, please check neutron logs for more information. 
[ 767.606388] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 767.606388] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 767.606388] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 767.606388] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 767.606388] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 767.606388] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 767.606388] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 767.606388] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 767.606388] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 767.606388] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 767.606388] env[61998]: ERROR nova.compute.manager raise self.value [ 767.606388] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 767.606388] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 767.606388] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 767.606388] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 767.606788] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 767.606788] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 767.606788] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 65c428f0-8266-4930-ba7c-15bf1d6c7bd8, please check neutron logs for more information. 
[ 767.606788] env[61998]: ERROR nova.compute.manager [ 767.606788] env[61998]: Traceback (most recent call last): [ 767.606788] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 767.606788] env[61998]: listener.cb(fileno) [ 767.606788] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 767.606788] env[61998]: result = function(*args, **kwargs) [ 767.606788] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 767.606788] env[61998]: return func(*args, **kwargs) [ 767.606788] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 767.606788] env[61998]: raise e [ 767.606788] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 767.606788] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 767.606788] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 767.606788] env[61998]: created_port_ids = self._update_ports_for_instance( [ 767.606788] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 767.606788] env[61998]: with excutils.save_and_reraise_exception(): [ 767.606788] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 767.606788] env[61998]: self.force_reraise() [ 767.606788] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 767.606788] env[61998]: raise self.value [ 767.606788] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 767.606788] env[61998]: updated_port = self._update_port( [ 767.606788] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 767.606788] env[61998]: _ensure_no_port_binding_failure(port) [ 767.606788] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 767.606788] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 767.607541] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 65c428f0-8266-4930-ba7c-15bf1d6c7bd8, please check neutron logs for more information. [ 767.607541] env[61998]: Removing descriptor: 17 [ 767.622179] env[61998]: DEBUG nova.network.neutron [-] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.696031] env[61998]: DEBUG nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 767.721527] env[61998]: DEBUG nova.virt.hardware [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 767.721815] env[61998]: DEBUG nova.virt.hardware [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 767.721968] env[61998]: DEBUG nova.virt.hardware [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.722171] env[61998]: DEBUG nova.virt.hardware [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 767.722314] env[61998]: DEBUG nova.virt.hardware [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.722470] env[61998]: DEBUG nova.virt.hardware [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 767.722650] env[61998]: DEBUG nova.virt.hardware [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 767.722803] env[61998]: DEBUG nova.virt.hardware [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 767.722963] env[61998]: DEBUG nova.virt.hardware [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member]
Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 767.723135] env[61998]: DEBUG nova.virt.hardware [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 767.723290] env[61998]: DEBUG nova.virt.hardware [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 767.724183] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96dc5c1e-21f8-4f30-ab3f-3c9ea33e0b24 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.732083] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2810c268-4b59-46d7-ba2c-f33442f68f2d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.745568] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.605s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.746182] env[61998]: ERROR nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7588c242-9de5-4d72-89db-ba1d0d17e49b, please check neutron logs for more information. 
[ 767.746182] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Traceback (most recent call last): [ 767.746182] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 767.746182] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] self.driver.spawn(context, instance, image_meta, [ 767.746182] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 767.746182] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 767.746182] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 767.746182] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] vm_ref = self.build_virtual_machine(instance, [ 767.746182] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 767.746182] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] vif_infos = vmwarevif.get_vif_info(self._session, [ 767.746182] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] for vif in network_info: [ 767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] return self._sync_wrapper(fn, *args, **kwargs) [ 767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] self.wait() [ 767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] self[:] = self._gt.wait() [ 767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] return self._exit_event.wait() [ 767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] result = hub.switch() [ 767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
767.746474] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] return self.greenlet.switch() [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] result = function(*args, **kwargs) [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] return func(*args, **kwargs) [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] raise e [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] nwinfo = self.network_api.allocate_for_instance( [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] created_port_ids = self._update_ports_for_instance( [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] with excutils.save_and_reraise_exception(): [ 767.746785] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 767.747122] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] self.force_reraise() [ 767.747122] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 767.747122] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] raise self.value [ 767.747122] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 767.747122] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] updated_port = self._update_port( [ 767.747122] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 767.747122] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] _ensure_no_port_binding_failure(port) [ 767.747122] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 767.747122] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] raise exception.PortBindingFailed(port_id=port['id']) [ 767.747122] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] nova.exception.PortBindingFailed: Binding failed for port 7588c242-9de5-4d72-89db-ba1d0d17e49b, please check neutron logs for more information. [ 767.747122] env[61998]: ERROR nova.compute.manager [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] [ 767.747389] env[61998]: DEBUG nova.compute.utils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Binding failed for port 7588c242-9de5-4d72-89db-ba1d0d17e49b, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 767.748500] env[61998]: ERROR nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 65c428f0-8266-4930-ba7c-15bf1d6c7bd8, please check neutron logs for more information. [ 767.748500] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Traceback (most recent call last): [ 767.748500] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 767.748500] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] yield resources [ 767.748500] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 767.748500] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] self.driver.spawn(context, instance, image_meta, [ 767.748500] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 767.748500] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] self._vmops.spawn(context, instance, image_meta, injected_files, [ 767.748500] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 767.748500] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] vm_ref = self.build_virtual_machine(instance, [ 767.748500] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] vif_infos = vmwarevif.get_vif_info(self._session, [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] for vif in network_info: [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] 
File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] return self._sync_wrapper(fn, *args, **kwargs) [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] self.wait() [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] self[:] = self._gt.wait() [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] return self._exit_event.wait() [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 767.748735] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] current.throw(*self._exc) [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] result = function(*args, **kwargs) [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] return func(*args, **kwargs) [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] raise e [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] nwinfo = self.network_api.allocate_for_instance( [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] created_port_ids = self._update_ports_for_instance( [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] with excutils.save_and_reraise_exception(): [ 767.748982] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 767.749241] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] self.force_reraise() [ 767.749241] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 767.749241] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] raise self.value [ 767.749241] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 767.749241] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] updated_port = self._update_port( [ 767.749241] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 767.749241] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] _ensure_no_port_binding_failure(port) [ 767.749241] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 767.749241] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] raise exception.PortBindingFailed(port_id=port['id']) [ 767.749241] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] nova.exception.PortBindingFailed: Binding failed for port 65c428f0-8266-4930-ba7c-15bf1d6c7bd8, please check neutron logs for more information. [ 767.749241] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] [ 767.749241] env[61998]: INFO nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Terminating instance [ 767.751323] env[61998]: DEBUG nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Build of instance a733a167-9713-43b7-bcc0-b0af47879ffc was re-scheduled: Binding failed for port 7588c242-9de5-4d72-89db-ba1d0d17e49b, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 767.751729] env[61998]: DEBUG nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 767.751943] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "refresh_cache-a733a167-9713-43b7-bcc0-b0af47879ffc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.752100] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquired lock "refresh_cache-a733a167-9713-43b7-bcc0-b0af47879ffc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.752256] env[61998]: DEBUG nova.network.neutron [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 767.753299] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.558s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.765785] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "refresh_cache-f5c91aad-0f8a-4ad3-8566-7f36ff983575" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.825745] env[61998]: DEBUG oslo_concurrency.lockutils [req-ac8d15c0-e177-4a47-8f44-d013da15d9cc req-49f17903-ef95-4f03-a155-a8d611f897c9 service nova] Releasing lock "refresh_cache-3f2ed06f-27d0-4a73-a678-430db5334147" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.826013] env[61998]: DEBUG nova.compute.manager [req-ac8d15c0-e177-4a47-8f44-d013da15d9cc req-49f17903-ef95-4f03-a155-a8d611f897c9 service nova] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Received event network-vif-deleted-83835327-ef7b-48ef-b628-4e777b266394 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 767.982175] env[61998]: DEBUG nova.network.neutron [req-10d91ce9-3fd6-49c0-b1d0-62e51fa5704a req-d9c0e28b-b474-4202-84be-975058efb5b9 service nova] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.066914] env[61998]: DEBUG nova.network.neutron [req-10d91ce9-3fd6-49c0-b1d0-62e51fa5704a req-d9c0e28b-b474-4202-84be-975058efb5b9 service nova] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.125167] env[61998]: INFO nova.compute.manager [-] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Took 1.39 seconds to deallocate network for instance. [ 768.127504] env[61998]: DEBUG nova.compute.claims [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 768.127683] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.275484] env[61998]: DEBUG nova.network.neutron [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.362419] env[61998]: DEBUG nova.network.neutron [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.508743] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1755a9c3-e76a-472c-a450-2b65b31d6a37 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.516179] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10738798-579e-43fd-b5c3-043b5e949d72 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.546196] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7db0e9-1048-4806-a473-4959328dd9f6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.552966] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5161c5a8-df8a-4c9b-b787-e142aaa2830d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.565489] env[61998]: DEBUG nova.compute.provider_tree [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.569492] 
env[61998]: DEBUG oslo_concurrency.lockutils [req-10d91ce9-3fd6-49c0-b1d0-62e51fa5704a req-d9c0e28b-b474-4202-84be-975058efb5b9 service nova] Releasing lock "refresh_cache-f5c91aad-0f8a-4ad3-8566-7f36ff983575" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.569847] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "refresh_cache-f5c91aad-0f8a-4ad3-8566-7f36ff983575" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.570194] env[61998]: DEBUG nova.network.neutron [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 768.865018] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Releasing lock "refresh_cache-a733a167-9713-43b7-bcc0-b0af47879ffc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.865299] env[61998]: DEBUG nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 768.865509] env[61998]: DEBUG nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 768.865715] env[61998]: DEBUG nova.network.neutron [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 768.880441] env[61998]: DEBUG nova.network.neutron [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.069054] env[61998]: DEBUG nova.scheduler.client.report [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 769.089157] env[61998]: DEBUG nova.network.neutron [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.160020] env[61998]: DEBUG nova.network.neutron [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.383209] env[61998]: DEBUG nova.network.neutron [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.490356] env[61998]: DEBUG nova.compute.manager [req-6cb62474-e451-497a-bc90-9290fdf1b07d req-d0ccfb2a-2435-445c-9838-0c5cbb9d0682 service nova] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Received event network-vif-deleted-65c428f0-8266-4930-ba7c-15bf1d6c7bd8 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 769.576100] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.823s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.576749] env[61998]: ERROR nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b740a604-956e-479c-91d9-b4de2a6b8835, please check neutron logs for more information. 
[ 769.576749] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Traceback (most recent call last): [ 769.576749] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 769.576749] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] self.driver.spawn(context, instance, image_meta, [ 769.576749] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 769.576749] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] self._vmops.spawn(context, instance, image_meta, injected_files, [ 769.576749] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 769.576749] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] vm_ref = self.build_virtual_machine(instance, [ 769.576749] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 769.576749] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] vif_infos = vmwarevif.get_vif_info(self._session, [ 769.576749] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] for vif in network_info: [ 769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] return self._sync_wrapper(fn, *args, **kwargs) [ 769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] self.wait() [ 769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] self[:] = self._gt.wait() [ 769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] return self._exit_event.wait() [ 769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] result = hub.switch() [ 769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
769.577096] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] return self.greenlet.switch() [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] result = function(*args, **kwargs) [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] return func(*args, **kwargs) [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] raise e [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] nwinfo = self.network_api.allocate_for_instance( [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] created_port_ids = self._update_ports_for_instance( [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] with excutils.save_and_reraise_exception(): [ 769.577365] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.577621] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] self.force_reraise() [ 769.577621] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.577621] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] raise self.value [ 769.577621] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.577621] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] updated_port = self._update_port( [ 769.577621] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.577621] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] _ensure_no_port_binding_failure(port) [ 769.577621] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 769.577621] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] raise exception.PortBindingFailed(port_id=port['id']) [ 769.577621] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] nova.exception.PortBindingFailed: Binding failed for port b740a604-956e-479c-91d9-b4de2a6b8835, please check neutron logs for more information. [ 769.577621] env[61998]: ERROR nova.compute.manager [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] [ 769.578025] env[61998]: DEBUG nova.compute.utils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Binding failed for port b740a604-956e-479c-91d9-b4de2a6b8835, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 769.578557] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.811s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.583074] env[61998]: DEBUG nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Build of instance 56e74975-e4fa-4ff8-ab87-aa74125dab78 was re-scheduled: Binding failed for port b740a604-956e-479c-91d9-b4de2a6b8835, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 769.583074] env[61998]: DEBUG nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 769.583218] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquiring lock "refresh_cache-56e74975-e4fa-4ff8-ab87-aa74125dab78" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.583355] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Acquired lock "refresh_cache-56e74975-e4fa-4ff8-ab87-aa74125dab78" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.583509] env[61998]: DEBUG nova.network.neutron [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 769.663107] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] 
Releasing lock "refresh_cache-f5c91aad-0f8a-4ad3-8566-7f36ff983575" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.663608] env[61998]: DEBUG nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 769.663837] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 769.664213] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7d6c969a-f806-4872-a732-b62e7be3df52 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.673852] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4cd191-812c-4c5f-81ce-c6b0cc0f693d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.695844] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f5c91aad-0f8a-4ad3-8566-7f36ff983575 could not be found. [ 769.696152] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 769.696500] env[61998]: INFO nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Took 0.03 seconds to destroy the instance on the hypervisor. [ 769.696762] env[61998]: DEBUG oslo.service.loopingcall [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 769.696979] env[61998]: DEBUG nova.compute.manager [-] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 769.697295] env[61998]: DEBUG nova.network.neutron [-] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 769.711972] env[61998]: DEBUG nova.network.neutron [-] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.886191] env[61998]: INFO nova.compute.manager [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: a733a167-9713-43b7-bcc0-b0af47879ffc] Took 1.02 seconds to deallocate network for instance. [ 770.099854] env[61998]: DEBUG nova.network.neutron [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.178979] env[61998]: DEBUG nova.network.neutron [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.214396] env[61998]: DEBUG nova.network.neutron [-] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.681619] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Releasing lock "refresh_cache-56e74975-e4fa-4ff8-ab87-aa74125dab78" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.681920] env[61998]: DEBUG nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 770.682136] env[61998]: DEBUG nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 770.682303] env[61998]: DEBUG nova.network.neutron [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 770.699904] env[61998]: DEBUG nova.network.neutron [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.716377] env[61998]: INFO nova.compute.manager [-] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Took 1.02 seconds to deallocate network for instance. 
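--- annotation ---
The three PortBindingFailed tracebacks above (ports 7588c242-9de5-4d72-89db-ba1d0d17e49b, 65c428f0-8266-4930-ba7c-15bf1d6c7bd8, b740a604-956e-479c-91d9-b4de2a6b8835) all terminate in the same guard, _ensure_no_port_binding_failure at nova/network/neutron.py:294, which raises exception.PortBindingFailed(port_id=port['id']) when Neutron reports the port binding as failed. Below is a minimal sketch of that guard: the raise line and the exception message are taken verbatim from the tracebacks, while the 'binding:vif_type' comparison, the marker value, and the local stand-in exception class are assumptions for illustration, not Nova's exact source.

    # Sketch reconstructed from the tracebacks above, not verbatim Nova code.
    # PortBindingFailed here stands in for nova.exception.PortBindingFailed;
    # the 'binding:vif_type' check and marker value are assumptions.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed marker value

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            # Message text matches the log records above.
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron flags a port whose binding could not be completed;
        # Nova converts that flag into a hard spawn failure, which is
        # what produces the ERROR records seen above.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

Once raised, the exception propagates back through _update_ports_for_instance and allocate_for_instance into _build_and_run_instance, where the build is re-scheduled (manager.py:2486) and the network allocation is torn down; the subsequent "Unplugging VIFs", "Deallocating network for instance", and "Deleted allocations" records in this section are that cleanup path executing for each affected instance.
--- end annotation ---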
[ 770.718253] env[61998]: DEBUG nova.compute.claims [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 770.718421] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.916237] env[61998]: INFO nova.scheduler.client.report [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Deleted allocations for instance a733a167-9713-43b7-bcc0-b0af47879ffc [ 771.107549] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance a733a167-9713-43b7-bcc0-b0af47879ffc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.202811] env[61998]: DEBUG nova.network.neutron [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.423874] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b6173428-31a9-4cf9-93a1-f39d752b4463 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "a733a167-9713-43b7-bcc0-b0af47879ffc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.877s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.612223] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 56e74975-e4fa-4ff8-ab87-aa74125dab78 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.612400] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance e632307a-ffe9-45a6-9224-8598aea5d269 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 771.612547] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance b9ec575c-034b-46bc-afbd-7a8a07a8e005 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 771.612678] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance e49e77d6-8d5e-4d89-b129-ac34cd1969c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 771.612796] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 3f2ed06f-27d0-4a73-a678-430db5334147 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 771.612963] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance f5c91aad-0f8a-4ad3-8566-7f36ff983575 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 771.705782] env[61998]: INFO nova.compute.manager [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] [instance: 56e74975-e4fa-4ff8-ab87-aa74125dab78] Took 1.02 seconds to deallocate network for instance. [ 771.927189] env[61998]: DEBUG nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 772.116278] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.446807] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.481416] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "c84d15dc-0ef2-44e2-b579-104678a6bb07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.481653] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "c84d15dc-0ef2-44e2-b579-104678a6bb07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.619583] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance ad1084f8-a0c9-4783-af2d-aa677116a451 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.734485] env[61998]: INFO nova.scheduler.client.report [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Deleted allocations for instance 56e74975-e4fa-4ff8-ab87-aa74125dab78 [ 773.125750] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 429bcfa3-8bca-42c3-9049-b7ae09438f47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 773.242427] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7768b95d-9858-4c7b-98da-c69856623329 tempest-VolumesAdminNegativeTest-194489036 tempest-VolumesAdminNegativeTest-194489036-project-member] Lock "56e74975-e4fa-4ff8-ab87-aa74125dab78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 143.170s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.628884] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 2963f997-eb4c-4bfd-be28-6c1b383598c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 773.745485] env[61998]: DEBUG nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 774.132544] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 08e60642-0784-4898-9de5-444a24fba508 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.272084] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.635559] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 5eb786f1-7789-48a0-a04e-a4039e387f58 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 775.139437] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance a7225abb-d8ea-49fc-85da-7791d9dde5bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 775.642415] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance b3232fcd-43b2-4139-afe1-fbe863d0af30 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 776.146476] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 776.649662] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 169437f2-fb18-4d5c-8d00-b82e9e5752d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 777.152873] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance c55717f0-8ef2-4e55-b1cf-60f6faea9e5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 777.655952] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance b3a3bb81-843b-4227-bebf-a8079f98c0f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.159099] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 2d0b199f-e0f1-42e0-afb5-e08602aebf01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.662128] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance dadd9985-bca3-4207-927f-9490e0ae3f10 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.664672] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 778.664672] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 778.793032] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Refreshing inventories for resource provider c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 778.808405] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Updating ProviderTree inventory for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 778.808405] env[61998]: DEBUG nova.compute.provider_tree [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Updating inventory in ProviderTree for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 778.817886] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Refreshing aggregate associations for resource provider c8c34fc8-902a-460e-a93a-a1e887f55ddd, aggregates: None {{(pid=61998) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 778.837649] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Refreshing trait associations for resource provider c8c34fc8-902a-460e-a93a-a1e887f55ddd, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=61998) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 779.134231] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6986e2c7-1c40-4507-9365-ebe949371f41 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.146295] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-94097227-77f2-4c0d-b460-49c7a0ae30d8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.176599] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6243155f-bcd3-4554-ad67-89c0de69e499 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.184310] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce33007-26de-4605-ac38-b660d16d40f2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.199377] env[61998]: DEBUG nova.compute.provider_tree [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.702523] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 780.211022] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61998) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 780.211022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.630s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.211022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.071s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.212164] env[61998]: INFO nova.compute.claims [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.216164] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 780.216345] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Cleaning up deleted instances {{(pid=61998) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11527}} [ 780.722806] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] There are 4 instances to clean {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11536}} [ 780.723074] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: f0a011bb-4939-4384-885c-6ce482875b4e] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 780.974682] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.974938] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.228489] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: a8f6254f-b867-4967-b4fa-bb70f471f89d] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 781.533952] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed46e0af-fe0b-49a4-b9dc-0a11badf3982 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.542039] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd609163-1495-4ac4-b9b7-eb60f6751271 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.573626] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d85482-55d1-4404-a43e-233b1b65a600 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.581080] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b125b0d-f23c-4005-8ebb-bcf5556a4707 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.594167] env[61998]: DEBUG nova.compute.provider_tree [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.732419] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: e4ada227-b79a-457a-b063-dde99840aa14] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11540}} [ 782.097077] env[61998]: DEBUG nova.scheduler.client.report [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 782.235426] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 2aabbd53-4c4d-4b53-8135-34cc5a17fd47] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 782.602197] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.393s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.602739] env[61998]: DEBUG nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 782.605295] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.482s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.606680] env[61998]: INFO nova.compute.claims [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 782.738574] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 782.738760] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Cleaning up deleted instances with incomplete migration {{(pid=61998) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11565}} [ 783.112072] env[61998]: DEBUG nova.compute.utils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 783.114444] env[61998]: DEBUG nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 783.115056] env[61998]: DEBUG nova.network.neutron [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 783.161724] env[61998]: DEBUG nova.policy [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '664c146362bf40dc880ec2fb4dcffd6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bce8b65663234390ba27b6173ce5e519', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 783.241692] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 783.447103] env[61998]: DEBUG nova.network.neutron [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Successfully created port: c4010d42-4f0e-46d7-9d8a-5f23f8659a44 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 783.617283] env[61998]: DEBUG nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 783.889532] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a695b418-2009-42d6-981d-2cc872c79fa2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.898436] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785ac28d-ccb8-416f-9d88-39cfaa34416c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.931773] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd97287-b0ab-40b7-a1c0-d0c6e35b5793 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.939356] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23af926a-38a7-4233-997a-018e1916ba83 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.952698] env[61998]: DEBUG nova.compute.provider_tree [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.163457] env[61998]: DEBUG nova.compute.manager [req-fe9c52da-6a50-4c6a-9ef3-05772a7a3682 req-01c750be-fdc9-41ef-85c4-e580915d8307 service nova] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Received event network-changed-c4010d42-4f0e-46d7-9d8a-5f23f8659a44 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 784.163661] env[61998]: DEBUG nova.compute.manager [req-fe9c52da-6a50-4c6a-9ef3-05772a7a3682 req-01c750be-fdc9-41ef-85c4-e580915d8307 service nova] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Refreshing instance network info cache due to event network-changed-c4010d42-4f0e-46d7-9d8a-5f23f8659a44. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 784.163868] env[61998]: DEBUG oslo_concurrency.lockutils [req-fe9c52da-6a50-4c6a-9ef3-05772a7a3682 req-01c750be-fdc9-41ef-85c4-e580915d8307 service nova] Acquiring lock "refresh_cache-bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.164015] env[61998]: DEBUG oslo_concurrency.lockutils [req-fe9c52da-6a50-4c6a-9ef3-05772a7a3682 req-01c750be-fdc9-41ef-85c4-e580915d8307 service nova] Acquired lock "refresh_cache-bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.164197] env[61998]: DEBUG nova.network.neutron [req-fe9c52da-6a50-4c6a-9ef3-05772a7a3682 req-01c750be-fdc9-41ef-85c4-e580915d8307 service nova] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Refreshing network info cache for port c4010d42-4f0e-46d7-9d8a-5f23f8659a44 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 784.374191] env[61998]: ERROR nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c4010d42-4f0e-46d7-9d8a-5f23f8659a44, please check neutron logs for more information. [ 784.374191] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 784.374191] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 784.374191] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 784.374191] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 784.374191] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 784.374191] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 784.374191] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 784.374191] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.374191] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 784.374191] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.374191] env[61998]: ERROR nova.compute.manager raise self.value [ 784.374191] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 784.374191] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 784.374191] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.374191] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 784.375055] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.375055] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 784.375055] env[61998]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port c4010d42-4f0e-46d7-9d8a-5f23f8659a44, please check neutron logs for more information. [ 784.375055] env[61998]: ERROR nova.compute.manager [ 784.375055] env[61998]: Traceback (most recent call last): [ 784.375055] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 784.375055] env[61998]: listener.cb(fileno) [ 784.375055] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 784.375055] env[61998]: result = function(*args, **kwargs) [ 784.375055] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 784.375055] env[61998]: return func(*args, **kwargs) [ 784.375055] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 784.375055] env[61998]: raise e [ 784.375055] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 784.375055] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 784.375055] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 784.375055] env[61998]: created_port_ids = self._update_ports_for_instance( [ 784.375055] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 784.375055] env[61998]: with excutils.save_and_reraise_exception(): [ 784.375055] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.375055] env[61998]: self.force_reraise() [ 784.375055] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.375055] env[61998]: raise self.value [ 784.375055] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 784.375055] env[61998]: updated_port = self._update_port( [ 784.375055] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.375055] env[61998]: _ensure_no_port_binding_failure(port) [ 784.375055] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.375055] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 784.375648] env[61998]: nova.exception.PortBindingFailed: Binding failed for port c4010d42-4f0e-46d7-9d8a-5f23f8659a44, please check neutron logs for more information. 
[ 784.375648] env[61998]: Removing descriptor: 17 [ 784.455598] env[61998]: DEBUG nova.scheduler.client.report [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 784.628969] env[61998]: DEBUG nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 784.654614] env[61998]: DEBUG nova.virt.hardware [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 784.654854] env[61998]: DEBUG nova.virt.hardware [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 784.655011] env[61998]: DEBUG nova.virt.hardware [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.655202] env[61998]: DEBUG nova.virt.hardware [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 784.655346] env[61998]: DEBUG nova.virt.hardware [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 784.655488] env[61998]: DEBUG nova.virt.hardware [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 784.655694] env[61998]: DEBUG nova.virt.hardware [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 784.655850] env[61998]: DEBUG nova.virt.hardware [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 784.656023] env[61998]: DEBUG nova.virt.hardware [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 784.656187] env[61998]: DEBUG nova.virt.hardware [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 784.656401] env[61998]: DEBUG nova.virt.hardware [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 784.657299] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64316e9-feac-47f9-ab0d-e1f0263ec8af {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.665682] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e7230c-7af5-4b3e-beb8-46a3d28a5180 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.683651] env[61998]: ERROR nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c4010d42-4f0e-46d7-9d8a-5f23f8659a44, please check neutron logs for more information. 
[ 784.683651] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Traceback (most recent call last): [ 784.683651] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 784.683651] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] yield resources [ 784.683651] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 784.683651] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] self.driver.spawn(context, instance, image_meta, [ 784.683651] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 784.683651] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 784.683651] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 784.683651] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] vm_ref = self.build_virtual_machine(instance, [ 784.683651] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 784.683988] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] vif_infos = vmwarevif.get_vif_info(self._session, [ 784.683988] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 784.683988] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] for vif in network_info: [ 784.683988] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 784.683988] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] return self._sync_wrapper(fn, *args, **kwargs) [ 784.683988] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 784.683988] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] self.wait() [ 784.683988] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 784.683988] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] self[:] = self._gt.wait() [ 784.683988] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 784.683988] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] return self._exit_event.wait() [ 784.683988] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 784.683988] env[61998]: ERROR 
nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] current.throw(*self._exc) [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] result = function(*args, **kwargs) [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] return func(*args, **kwargs) [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] raise e [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] nwinfo = self.network_api.allocate_for_instance( [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] created_port_ids = self._update_ports_for_instance( [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] with excutils.save_and_reraise_exception(): [ 784.684283] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.684575] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] self.force_reraise() [ 784.684575] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.684575] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] raise self.value [ 784.684575] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 784.684575] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] updated_port = self._update_port( [ 784.684575] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.684575] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] _ensure_no_port_binding_failure(port) [ 784.684575] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
784.684575] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] raise exception.PortBindingFailed(port_id=port['id']) [ 784.684575] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] nova.exception.PortBindingFailed: Binding failed for port c4010d42-4f0e-46d7-9d8a-5f23f8659a44, please check neutron logs for more information. [ 784.684575] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] [ 784.684575] env[61998]: INFO nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Terminating instance [ 784.686549] env[61998]: DEBUG nova.network.neutron [req-fe9c52da-6a50-4c6a-9ef3-05772a7a3682 req-01c750be-fdc9-41ef-85c4-e580915d8307 service nova] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.689162] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquiring lock "refresh_cache-bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.772814] env[61998]: DEBUG nova.network.neutron [req-fe9c52da-6a50-4c6a-9ef3-05772a7a3682 req-01c750be-fdc9-41ef-85c4-e580915d8307 service nova] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.960542] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.961300] env[61998]: DEBUG nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 784.963979] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.648s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.276196] env[61998]: DEBUG oslo_concurrency.lockutils [req-fe9c52da-6a50-4c6a-9ef3-05772a7a3682 req-01c750be-fdc9-41ef-85c4-e580915d8307 service nova] Releasing lock "refresh_cache-bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.277395] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquired lock "refresh_cache-bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.277395] env[61998]: DEBUG nova.network.neutron [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 785.469101] env[61998]: DEBUG nova.compute.utils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 785.474029] env[61998]: DEBUG nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 785.474029] env[61998]: DEBUG nova.network.neutron [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 785.520833] env[61998]: DEBUG nova.policy [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '664c146362bf40dc880ec2fb4dcffd6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bce8b65663234390ba27b6173ce5e519', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 785.720117] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d03a164-47f0-4933-9d76-2bb74a067f91 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.727877] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0352ddf-4733-4630-9123-852935dd221e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.758507] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9193711-28fa-406d-b801-124f2f273d60 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.765806] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b37dfd-397c-400a-80d2-d1628c05d6e9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.781888] env[61998]: DEBUG nova.compute.provider_tree [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.799105] env[61998]: DEBUG nova.network.neutron [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.827453] env[61998]: DEBUG nova.network.neutron [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Successfully created port: 7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.895341] env[61998]: DEBUG nova.network.neutron [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.974671] env[61998]: DEBUG nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 786.187353] env[61998]: DEBUG nova.compute.manager [req-0e6120a2-d748-4a36-ac44-6621b269f146 req-b13a50ae-548b-4e80-9d16-6add98a8aaa6 service nova] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Received event network-vif-deleted-c4010d42-4f0e-46d7-9d8a-5f23f8659a44 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 786.284863] env[61998]: DEBUG nova.scheduler.client.report [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 786.399217] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Releasing lock "refresh_cache-bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.399217] env[61998]: DEBUG nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 786.399390] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 786.399710] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9485e9ce-815c-49b8-8eed-c24a6cc50d7f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.409083] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5174454-d41d-4898-9c4f-89e6a20b6340 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.430158] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5 could not be found. [ 786.430290] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 786.430459] env[61998]: INFO nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Took 0.03 seconds to destroy the instance on the hypervisor. [ 786.430692] env[61998]: DEBUG oslo.service.loopingcall [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.431217] env[61998]: DEBUG nova.compute.manager [-] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 786.431491] env[61998]: DEBUG nova.network.neutron [-] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 786.446314] env[61998]: DEBUG nova.network.neutron [-] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.690864] env[61998]: ERROR nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0, please check neutron logs for more information. [ 786.690864] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 786.690864] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 786.690864] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 786.690864] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.690864] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 786.690864] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.690864] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 786.690864] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.690864] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 786.690864] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.690864] env[61998]: ERROR nova.compute.manager raise self.value [ 786.690864] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.690864] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 786.690864] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.690864] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 786.691546] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 786.691546] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 786.691546] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0, please check neutron logs for more information. 
[ 786.691546] env[61998]: ERROR nova.compute.manager [ 786.691546] env[61998]: Traceback (most recent call last): [ 786.691546] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 786.691546] env[61998]: listener.cb(fileno) [ 786.691546] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 786.691546] env[61998]: result = function(*args, **kwargs) [ 786.691546] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 786.691546] env[61998]: return func(*args, **kwargs) [ 786.691546] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 786.691546] env[61998]: raise e [ 786.691546] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 786.691546] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 786.691546] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.691546] env[61998]: created_port_ids = self._update_ports_for_instance( [ 786.691546] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.691546] env[61998]: with excutils.save_and_reraise_exception(): [ 786.691546] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.691546] env[61998]: self.force_reraise() [ 786.691546] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.691546] env[61998]: raise self.value [ 786.691546] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.691546] env[61998]: updated_port = self._update_port( [ 786.691546] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.691546] env[61998]: _ensure_no_port_binding_failure(port) [ 786.691546] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 786.691546] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 786.692313] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0, please check neutron logs for more information. [ 786.692313] env[61998]: Removing descriptor: 17 [ 786.790871] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.827s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.791531] env[61998]: ERROR nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0ff860cb-d237-47e4-bd6b-857bd5a53937, please check neutron logs for more information. 
[ 786.791531] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Traceback (most recent call last): [ 786.791531] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 786.791531] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] self.driver.spawn(context, instance, image_meta, [ 786.791531] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 786.791531] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] self._vmops.spawn(context, instance, image_meta, injected_files, [ 786.791531] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 786.791531] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] vm_ref = self.build_virtual_machine(instance, [ 786.791531] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 786.791531] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] vif_infos = vmwarevif.get_vif_info(self._session, [ 786.791531] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] for vif in network_info: [ 786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] return self._sync_wrapper(fn, *args, **kwargs) [ 786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] self.wait() [ 786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] self[:] = self._gt.wait() [ 786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] return self._exit_event.wait() [ 786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] result = hub.switch() [ 786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
786.791905] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] return self.greenlet.switch() [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] result = function(*args, **kwargs) [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] return func(*args, **kwargs) [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] raise e [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] nwinfo = self.network_api.allocate_for_instance( [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] created_port_ids = self._update_ports_for_instance( [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] with excutils.save_and_reraise_exception(): [ 786.792322] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.792702] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] self.force_reraise() [ 786.792702] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.792702] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] raise self.value [ 786.792702] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.792702] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] updated_port = self._update_port( [ 786.792702] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.792702] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] _ensure_no_port_binding_failure(port) [ 786.792702] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 786.792702] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] raise exception.PortBindingFailed(port_id=port['id']) [ 786.792702] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] nova.exception.PortBindingFailed: Binding failed for port 0ff860cb-d237-47e4-bd6b-857bd5a53937, please check neutron logs for more information. [ 786.792702] env[61998]: ERROR nova.compute.manager [instance: e632307a-ffe9-45a6-9224-8598aea5d269] [ 786.793017] env[61998]: DEBUG nova.compute.utils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Binding failed for port 0ff860cb-d237-47e4-bd6b-857bd5a53937, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 786.794096] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.269s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.797393] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Build of instance e632307a-ffe9-45a6-9224-8598aea5d269 was re-scheduled: Binding failed for port 0ff860cb-d237-47e4-bd6b-857bd5a53937, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 786.797810] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 786.798040] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "refresh_cache-e632307a-ffe9-45a6-9224-8598aea5d269" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.798187] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquired lock "refresh_cache-e632307a-ffe9-45a6-9224-8598aea5d269" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.798343] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 786.949075] env[61998]: DEBUG nova.network.neutron [-] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.983676] env[61998]: DEBUG nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 787.010436] env[61998]: DEBUG nova.virt.hardware [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 787.010692] env[61998]: DEBUG nova.virt.hardware [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 787.010848] env[61998]: DEBUG nova.virt.hardware [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.011141] env[61998]: DEBUG nova.virt.hardware [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 787.011177] env[61998]: DEBUG nova.virt.hardware [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.011347] env[61998]: DEBUG nova.virt.hardware [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 787.011550] env[61998]: DEBUG nova.virt.hardware [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 787.011710] env[61998]: DEBUG nova.virt.hardware [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 787.011875] env[61998]: DEBUG nova.virt.hardware [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 787.012049] env[61998]: DEBUG nova.virt.hardware [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 787.012271] env[61998]: DEBUG nova.virt.hardware [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 787.013156] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2653e2-9635-4446-89ca-18c32c75d3f2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.020842] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a060dbe9-a2d4-48d0-bb51-1ddd815f1c45 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.035659] env[61998]: ERROR nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0, please check neutron logs for more information. 
[ 787.035659] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Traceback (most recent call last): [ 787.035659] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 787.035659] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] yield resources [ 787.035659] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 787.035659] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] self.driver.spawn(context, instance, image_meta, [ 787.035659] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 787.035659] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] self._vmops.spawn(context, instance, image_meta, injected_files, [ 787.035659] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 787.035659] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] vm_ref = self.build_virtual_machine(instance, [ 787.035659] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 787.036080] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] vif_infos = vmwarevif.get_vif_info(self._session, [ 787.036080] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 787.036080] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] for vif in network_info: [ 787.036080] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 787.036080] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] return self._sync_wrapper(fn, *args, **kwargs) [ 787.036080] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 787.036080] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] self.wait() [ 787.036080] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 787.036080] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] self[:] = self._gt.wait() [ 787.036080] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 787.036080] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] return self._exit_event.wait() [ 787.036080] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 787.036080] env[61998]: ERROR 
nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] current.throw(*self._exc) [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] result = function(*args, **kwargs) [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] return func(*args, **kwargs) [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] raise e [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] nwinfo = self.network_api.allocate_for_instance( [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] created_port_ids = self._update_ports_for_instance( [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] with excutils.save_and_reraise_exception(): [ 787.036400] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.036764] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] self.force_reraise() [ 787.036764] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.036764] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] raise self.value [ 787.036764] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 787.036764] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] updated_port = self._update_port( [ 787.036764] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.036764] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] _ensure_no_port_binding_failure(port) [ 787.036764] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
787.036764] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] raise exception.PortBindingFailed(port_id=port['id']) [ 787.036764] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] nova.exception.PortBindingFailed: Binding failed for port 7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0, please check neutron logs for more information. [ 787.036764] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] [ 787.036764] env[61998]: INFO nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Terminating instance [ 787.038375] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquiring lock "refresh_cache-ad1084f8-a0c9-4783-af2d-aa677116a451" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.039048] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquired lock "refresh_cache-ad1084f8-a0c9-4783-af2d-aa677116a451" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.039048] env[61998]: DEBUG nova.network.neutron [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.317012] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.409580] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.451165] env[61998]: INFO nova.compute.manager [-] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Took 1.02 seconds to deallocate network for instance. 
[ 787.455571] env[61998]: DEBUG nova.compute.claims [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 787.455745] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.556036] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449dcee3-e72d-4bb5-8d6b-7d6d6a0e6b9d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.560152] env[61998]: DEBUG nova.network.neutron [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.564548] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbf9ad4-3080-4209-9539-9390610ae6b1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.594513] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2ed21c-388f-4c46-84d2-cdaff3170315 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.602014] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3f6b49-fb7a-46e8-a77c-06e7095ea789 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.615235] env[61998]: DEBUG nova.compute.provider_tree [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.781522] env[61998]: DEBUG nova.network.neutron [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.912565] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Releasing lock "refresh_cache-e632307a-ffe9-45a6-9224-8598aea5d269" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.912811] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 
tempest-MultipleCreateTestJSON-1503300055-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 787.912996] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 787.913191] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 787.929647] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.118130] env[61998]: DEBUG nova.scheduler.client.report [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 788.211239] env[61998]: DEBUG nova.compute.manager [req-d43965d4-11a3-4614-a661-255cab4cbb2e req-7b7441bc-7d52-41c7-93a9-0199b25cecec service nova] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Received event network-changed-7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 788.211432] env[61998]: DEBUG nova.compute.manager [req-d43965d4-11a3-4614-a661-255cab4cbb2e req-7b7441bc-7d52-41c7-93a9-0199b25cecec service nova] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Refreshing instance network info cache due to event network-changed-7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 788.211638] env[61998]: DEBUG oslo_concurrency.lockutils [req-d43965d4-11a3-4614-a661-255cab4cbb2e req-7b7441bc-7d52-41c7-93a9-0199b25cecec service nova] Acquiring lock "refresh_cache-ad1084f8-a0c9-4783-af2d-aa677116a451" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.284066] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Releasing lock "refresh_cache-ad1084f8-a0c9-4783-af2d-aa677116a451" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.284455] env[61998]: DEBUG nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 788.284640] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 788.284935] env[61998]: DEBUG oslo_concurrency.lockutils [req-d43965d4-11a3-4614-a661-255cab4cbb2e req-7b7441bc-7d52-41c7-93a9-0199b25cecec service nova] Acquired lock "refresh_cache-ad1084f8-a0c9-4783-af2d-aa677116a451" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.285118] env[61998]: DEBUG nova.network.neutron [req-d43965d4-11a3-4614-a661-255cab4cbb2e req-7b7441bc-7d52-41c7-93a9-0199b25cecec service nova] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Refreshing network info cache for port 7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 788.286162] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f15e536c-4e84-4e7d-8069-4f1685078c98 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.297032] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca167f5d-b7ec-4420-be9b-74aaf49f7b18 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.318240] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ad1084f8-a0c9-4783-af2d-aa677116a451 could not be found. 
[ 788.318443] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 788.318616] env[61998]: INFO nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Took 0.03 seconds to destroy the instance on the hypervisor. [ 788.318850] env[61998]: DEBUG oslo.service.loopingcall [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 788.319066] env[61998]: DEBUG nova.compute.manager [-] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 788.319160] env[61998]: DEBUG nova.network.neutron [-] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 788.333267] env[61998]: DEBUG nova.network.neutron [-] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.431949] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.622764] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.829s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.623412] env[61998]: ERROR nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 60237559-6ae1-4690-8aed-f076fb3cc4d0, please check neutron logs for more information. 
[ 788.623412] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Traceback (most recent call last): [ 788.623412] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 788.623412] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] self.driver.spawn(context, instance, image_meta, [ 788.623412] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 788.623412] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] self._vmops.spawn(context, instance, image_meta, injected_files, [ 788.623412] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 788.623412] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] vm_ref = self.build_virtual_machine(instance, [ 788.623412] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 788.623412] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] vif_infos = vmwarevif.get_vif_info(self._session, [ 788.623412] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] for vif in network_info: [ 788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] return self._sync_wrapper(fn, *args, **kwargs) [ 788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] self.wait() [ 788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] self[:] = self._gt.wait() [ 788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] return self._exit_event.wait() [ 788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] result = hub.switch() [ 788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
788.623751] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] return self.greenlet.switch() [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] result = function(*args, **kwargs) [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] return func(*args, **kwargs) [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] raise e [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] nwinfo = self.network_api.allocate_for_instance( [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] created_port_ids = self._update_ports_for_instance( [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] with excutils.save_and_reraise_exception(): [ 788.624107] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.624448] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] self.force_reraise() [ 788.624448] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.624448] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] raise self.value [ 788.624448] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.624448] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] updated_port = self._update_port( [ 788.624448] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.624448] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] _ensure_no_port_binding_failure(port) [ 788.624448] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 788.624448] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] raise exception.PortBindingFailed(port_id=port['id']) [ 788.624448] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] nova.exception.PortBindingFailed: Binding failed for port 60237559-6ae1-4690-8aed-f076fb3cc4d0, please check neutron logs for more information. [ 788.624448] env[61998]: ERROR nova.compute.manager [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] [ 788.624749] env[61998]: DEBUG nova.compute.utils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Binding failed for port 60237559-6ae1-4690-8aed-f076fb3cc4d0, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 788.625748] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Build of instance b9ec575c-034b-46bc-afbd-7a8a07a8e005 was re-scheduled: Binding failed for port 60237559-6ae1-4690-8aed-f076fb3cc4d0, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 788.626160] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 788.626391] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "refresh_cache-b9ec575c-034b-46bc-afbd-7a8a07a8e005" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.626539] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquired lock "refresh_cache-b9ec575c-034b-46bc-afbd-7a8a07a8e005" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.626695] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 788.628011] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.873s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.630503] env[61998]: INFO nova.compute.claims [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 
tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 788.803834] env[61998]: DEBUG nova.network.neutron [req-d43965d4-11a3-4614-a661-255cab4cbb2e req-7b7441bc-7d52-41c7-93a9-0199b25cecec service nova] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.834952] env[61998]: DEBUG nova.network.neutron [-] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.892886] env[61998]: DEBUG nova.network.neutron [req-d43965d4-11a3-4614-a661-255cab4cbb2e req-7b7441bc-7d52-41c7-93a9-0199b25cecec service nova] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.935032] env[61998]: INFO nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: e632307a-ffe9-45a6-9224-8598aea5d269] Took 1.02 seconds to deallocate network for instance. [ 789.155088] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.235922] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.338195] env[61998]: INFO nova.compute.manager [-] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Took 1.02 seconds to deallocate network for instance. 
[ 789.340669] env[61998]: DEBUG nova.compute.claims [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 789.343092] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.395077] env[61998]: DEBUG oslo_concurrency.lockutils [req-d43965d4-11a3-4614-a661-255cab4cbb2e req-7b7441bc-7d52-41c7-93a9-0199b25cecec service nova] Releasing lock "refresh_cache-ad1084f8-a0c9-4783-af2d-aa677116a451" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.395349] env[61998]: DEBUG nova.compute.manager [req-d43965d4-11a3-4614-a661-255cab4cbb2e req-7b7441bc-7d52-41c7-93a9-0199b25cecec service nova] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Received event network-vif-deleted-7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 789.742667] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Releasing lock "refresh_cache-b9ec575c-034b-46bc-afbd-7a8a07a8e005" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.742995] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 789.743284] env[61998]: DEBUG nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 789.743468] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 789.757885] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.859523] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b5f129-fa0b-450f-8a46-ba8cd051eb3c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.868372] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bbdbd5-4792-42c0-8ca3-8644dbd20645 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.899345] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0baf50c9-4d41-4d77-a5a5-e3d4b74f349c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.907441] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c63289b-f2f2-4186-acac-745c2dd61419 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.920935] env[61998]: DEBUG nova.compute.provider_tree [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.961642] env[61998]: INFO nova.scheduler.client.report [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Deleted allocations for instance e632307a-ffe9-45a6-9224-8598aea5d269 [ 790.261149] env[61998]: DEBUG nova.network.neutron [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.423743] env[61998]: DEBUG nova.scheduler.client.report [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 790.471963] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "e632307a-ffe9-45a6-9224-8598aea5d269" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 159.477s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.763451] env[61998]: INFO nova.compute.manager [None req-f297096f-0bda-436a-9abb-06dbb065f26c 
tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: b9ec575c-034b-46bc-afbd-7a8a07a8e005] Took 1.02 seconds to deallocate network for instance. [ 790.929060] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.301s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.929658] env[61998]: DEBUG nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 790.932637] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.311s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.977919] env[61998]: DEBUG nova.compute.manager [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 791.440564] env[61998]: DEBUG nova.compute.utils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 791.442200] env[61998]: DEBUG nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 791.442413] env[61998]: DEBUG nova.network.neutron [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 791.493536] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.495336] env[61998]: DEBUG nova.policy [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ff7d7fbfe61d4b2098ff0f3a484f2c5a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f50481d08fa6441ab7db8a77aecf7aeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 791.672745] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b63e1b-275c-4add-b261-34588fbeb9fa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.682089] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ed103b-381c-4f5a-a40f-3b8698bd95fa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.710166] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d426d178-9a63-4c23-a54e-709850459869 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.717267] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4dc230a-e03b-40eb-b660-c2ea91c35094 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.731119] env[61998]: DEBUG nova.compute.provider_tree [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.789560] env[61998]: INFO nova.scheduler.client.report [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Deleted allocations for instance b9ec575c-034b-46bc-afbd-7a8a07a8e005 [ 791.795628] env[61998]: DEBUG nova.network.neutron [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Successfully created 
port: b8f88e55-67b3-4cb8-80bd-68fd47dc79fa {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 791.945997] env[61998]: DEBUG nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 792.234055] env[61998]: DEBUG nova.scheduler.client.report [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 792.300074] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f297096f-0bda-436a-9abb-06dbb065f26c tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "b9ec575c-034b-46bc-afbd-7a8a07a8e005" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 161.271s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.491783] env[61998]: DEBUG nova.compute.manager [req-90cdf322-30da-41bf-9d13-2cc042b986e5 req-ad5db399-6c3f-45cc-a2b9-619b5699f9b7 service nova] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Received event network-changed-b8f88e55-67b3-4cb8-80bd-68fd47dc79fa {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 792.491968] env[61998]: DEBUG nova.compute.manager [req-90cdf322-30da-41bf-9d13-2cc042b986e5 req-ad5db399-6c3f-45cc-a2b9-619b5699f9b7 service nova] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Refreshing instance network info cache due to event network-changed-b8f88e55-67b3-4cb8-80bd-68fd47dc79fa. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 792.492194] env[61998]: DEBUG oslo_concurrency.lockutils [req-90cdf322-30da-41bf-9d13-2cc042b986e5 req-ad5db399-6c3f-45cc-a2b9-619b5699f9b7 service nova] Acquiring lock "refresh_cache-429bcfa3-8bca-42c3-9049-b7ae09438f47" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.492390] env[61998]: DEBUG oslo_concurrency.lockutils [req-90cdf322-30da-41bf-9d13-2cc042b986e5 req-ad5db399-6c3f-45cc-a2b9-619b5699f9b7 service nova] Acquired lock "refresh_cache-429bcfa3-8bca-42c3-9049-b7ae09438f47" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.492567] env[61998]: DEBUG nova.network.neutron [req-90cdf322-30da-41bf-9d13-2cc042b986e5 req-ad5db399-6c3f-45cc-a2b9-619b5699f9b7 service nova] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Refreshing network info cache for port b8f88e55-67b3-4cb8-80bd-68fd47dc79fa {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 792.685952] env[61998]: ERROR nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b8f88e55-67b3-4cb8-80bd-68fd47dc79fa, please check neutron logs for more information. [ 792.685952] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 792.685952] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 792.685952] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 792.685952] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 792.685952] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 792.685952] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 792.685952] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 792.685952] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 792.685952] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 792.685952] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 792.685952] env[61998]: ERROR nova.compute.manager raise self.value [ 792.685952] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 792.685952] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 792.685952] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 792.685952] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 792.686515] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 792.686515] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 792.686515] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed 
for port b8f88e55-67b3-4cb8-80bd-68fd47dc79fa, please check neutron logs for more information. [ 792.686515] env[61998]: ERROR nova.compute.manager [ 792.686515] env[61998]: Traceback (most recent call last): [ 792.686515] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 792.686515] env[61998]: listener.cb(fileno) [ 792.686515] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 792.686515] env[61998]: result = function(*args, **kwargs) [ 792.686515] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 792.686515] env[61998]: return func(*args, **kwargs) [ 792.686515] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 792.686515] env[61998]: raise e [ 792.686515] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 792.686515] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 792.686515] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 792.686515] env[61998]: created_port_ids = self._update_ports_for_instance( [ 792.686515] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 792.686515] env[61998]: with excutils.save_and_reraise_exception(): [ 792.686515] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 792.686515] env[61998]: self.force_reraise() [ 792.686515] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 792.686515] env[61998]: raise self.value [ 792.686515] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 792.686515] env[61998]: updated_port = self._update_port( [ 792.686515] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 792.686515] env[61998]: _ensure_no_port_binding_failure(port) [ 792.686515] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 792.686515] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 792.687331] env[61998]: nova.exception.PortBindingFailed: Binding failed for port b8f88e55-67b3-4cb8-80bd-68fd47dc79fa, please check neutron logs for more information. [ 792.687331] env[61998]: Removing descriptor: 17 [ 792.739396] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.806s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.740013] env[61998]: ERROR nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ddb3860b-63de-40cc-aaf6-bc715ccafb4e, please check neutron logs for more information. 
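The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294). A minimal sketch of that guard, assuming `port` is a plain Neutron port dict; the constant mirrors nova.network.model.VIF_TYPE_BINDING_FAILED and the message mirrors the log text, but this is not the exact Nova source:

    # Sketch of the binding-failure check the traceback passes through.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding via the port's binding:vif_type
        # attribute rather than an API error, so the caller must check it
        # explicitly after creating or updating the port.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

This is why the failure surfaces only when Nova iterates the network_info: the port create succeeds, and the bad binding is detected afterwards.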
[ 792.740013] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Traceback (most recent call last):
[ 792.740013] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance
[ 792.740013] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     self.driver.spawn(context, instance, image_meta,
[ 792.740013] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn
[ 792.740013] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 792.740013] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 792.740013] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     vm_ref = self.build_virtual_machine(instance,
[ 792.740013] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 792.740013] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 792.740013] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     for vif in network_info:
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     return self._sync_wrapper(fn, *args, **kwargs)
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     self.wait()
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     self[:] = self._gt.wait()
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     return self._exit_event.wait()
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     current.throw(*self._exc)
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 792.740296] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     result = function(*args, **kwargs)
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     return func(*args, **kwargs)
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     raise e
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     nwinfo = self.network_api.allocate_for_instance(
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     created_port_ids = self._update_ports_for_instance(
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     with excutils.save_and_reraise_exception():
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     self.force_reraise()
[ 792.740577] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 792.741091] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     raise self.value
[ 792.741091] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 792.741091] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     updated_port = self._update_port(
[ 792.741091] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 792.741091] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     _ensure_no_port_binding_failure(port)
[ 792.741091] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 792.741091] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]     raise exception.PortBindingFailed(port_id=port['id'])
[ 792.741091] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] nova.exception.PortBindingFailed: Binding failed for port ddb3860b-63de-40cc-aaf6-bc715ccafb4e, please check neutron logs for more information.
[ 792.741091] env[61998]: ERROR nova.compute.manager [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1]
[ 792.741091] env[61998]: DEBUG nova.compute.utils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Binding failed for port ddb3860b-63de-40cc-aaf6-bc715ccafb4e, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 792.742053] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.614s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 792.744900] env[61998]: DEBUG nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Build of instance e49e77d6-8d5e-4d89-b129-ac34cd1969c1 was re-scheduled: Binding failed for port ddb3860b-63de-40cc-aaf6-bc715ccafb4e, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}}
[ 792.745333] env[61998]: DEBUG nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}}
[ 792.745562] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Acquiring lock "refresh_cache-e49e77d6-8d5e-4d89-b129-ac34cd1969c1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 792.745683] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Acquired lock "refresh_cache-e49e77d6-8d5e-4d89-b129-ac34cd1969c1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 792.745836] env[61998]: DEBUG nova.network.neutron [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 792.801820] env[61998]: DEBUG nova.compute.manager [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}}
[ 792.963277] env[61998]: DEBUG nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}}
[ 792.988187] env[61998]: DEBUG nova.virt.hardware [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 792.988342] env[61998]: DEBUG nova.virt.hardware [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 792.988505] env[61998]: DEBUG nova.virt.hardware [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 792.988689] env[61998]: DEBUG nova.virt.hardware [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 792.988843] env[61998]: DEBUG nova.virt.hardware [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 792.988990] env[61998]: DEBUG nova.virt.hardware [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 792.989204] env[61998]: DEBUG nova.virt.hardware [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 792.989364] env[61998]: DEBUG nova.virt.hardware [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 792.989531] env[61998]: DEBUG nova.virt.hardware [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 792.989690] env[61998]: DEBUG nova.virt.hardware [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 792.989858] env[61998]: DEBUG nova.virt.hardware [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 792.990733] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c987f963-9851-4976-988b-e9aee9656ce3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 793.001060] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684e0c5c-4fb6-4268-863b-120560790af0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 793.015517] env[61998]: ERROR nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b8f88e55-67b3-4cb8-80bd-68fd47dc79fa, please check neutron logs for more information.
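Each of these tracebacks passes through oslo_utils.excutils.save_and_reraise_exception (the __exit__ and force_reraise frames). A short, hedged illustration of that pattern; update_port and cleanup_port below are hypothetical stand-ins, not Nova code:

    from oslo_utils import excutils

    def update_port(port_id):
        # Hypothetical stand-in for the Neutron call that fails above.
        raise RuntimeError(f'binding failed for {port_id}')

    def cleanup_port(port_id):
        # Hypothetical rollback step.
        print(f'rolling back {port_id}')

    def update_ports(port_ids):
        done = []
        try:
            for port_id in port_ids:
                update_port(port_id)
                done.append(port_id)
        except Exception:
            # The context manager saves the active exception, lets the
            # cleanup inside the block run, then re-raises the original
            # exception in __exit__ via force_reraise() -- exactly the
            # pair of frames that recurs in the tracebacks here.
            with excutils.save_and_reraise_exception():
                for port_id in done:
                    cleanup_port(port_id)

The point of the pattern is that cleanup runs without swallowing or replacing the original exception, which is why the same PortBindingFailed propagates intact all the way up to _build_and_run_instance.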
[ 793.015517] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Traceback (most recent call last): [ 793.015517] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 793.015517] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] yield resources [ 793.015517] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 793.015517] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] self.driver.spawn(context, instance, image_meta, [ 793.015517] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 793.015517] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] self._vmops.spawn(context, instance, image_meta, injected_files, [ 793.015517] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 793.015517] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] vm_ref = self.build_virtual_machine(instance, [ 793.015517] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 793.015795] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] vif_infos = vmwarevif.get_vif_info(self._session, [ 793.015795] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 793.015795] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] for vif in network_info: [ 793.015795] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 793.015795] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] return self._sync_wrapper(fn, *args, **kwargs) [ 793.015795] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 793.015795] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] self.wait() [ 793.015795] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 793.015795] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] self[:] = self._gt.wait() [ 793.015795] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 793.015795] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] return self._exit_event.wait() [ 793.015795] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 793.015795] env[61998]: ERROR 
nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] current.throw(*self._exc) [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] result = function(*args, **kwargs) [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] return func(*args, **kwargs) [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] raise e [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] nwinfo = self.network_api.allocate_for_instance( [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] created_port_ids = self._update_ports_for_instance( [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] with excutils.save_and_reraise_exception(): [ 793.016141] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.016508] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] self.force_reraise() [ 793.016508] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.016508] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] raise self.value [ 793.016508] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 793.016508] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] updated_port = self._update_port( [ 793.016508] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.016508] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] _ensure_no_port_binding_failure(port) [ 793.016508] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
793.016508] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] raise exception.PortBindingFailed(port_id=port['id']) [ 793.016508] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] nova.exception.PortBindingFailed: Binding failed for port b8f88e55-67b3-4cb8-80bd-68fd47dc79fa, please check neutron logs for more information. [ 793.016508] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] [ 793.016508] env[61998]: INFO nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Terminating instance [ 793.017845] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Acquiring lock "refresh_cache-429bcfa3-8bca-42c3-9049-b7ae09438f47" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.029273] env[61998]: DEBUG nova.network.neutron [req-90cdf322-30da-41bf-9d13-2cc042b986e5 req-ad5db399-6c3f-45cc-a2b9-619b5699f9b7 service nova] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 793.113336] env[61998]: DEBUG nova.network.neutron [req-90cdf322-30da-41bf-9d13-2cc042b986e5 req-ad5db399-6c3f-45cc-a2b9-619b5699f9b7 service nova] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.266111] env[61998]: DEBUG nova.network.neutron [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 793.323151] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.342355] env[61998]: DEBUG nova.network.neutron [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.489023] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2947fc98-21d0-4871-b490-6936f84ecaac {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.495709] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11df7ec6-73a2-4069-8bf5-91947f271d00 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.525544] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c8b7f1-7e46-4142-9db7-52d7e5d95734 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.532571] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959088f1-7e6e-42be-9d92-6f38e041f4de {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.545041] env[61998]: DEBUG nova.compute.provider_tree [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.616476] env[61998]: DEBUG oslo_concurrency.lockutils [req-90cdf322-30da-41bf-9d13-2cc042b986e5 req-ad5db399-6c3f-45cc-a2b9-619b5699f9b7 service nova] Releasing lock "refresh_cache-429bcfa3-8bca-42c3-9049-b7ae09438f47" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.616892] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Acquired lock "refresh_cache-429bcfa3-8bca-42c3-9049-b7ae09438f47" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.617090] env[61998]: DEBUG nova.network.neutron [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 793.846246] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 
tempest-ImagesNegativeTestJSON-132255802-project-member] Releasing lock "refresh_cache-e49e77d6-8d5e-4d89-b129-ac34cd1969c1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.846246] env[61998]: DEBUG nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 793.846246] env[61998]: DEBUG nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 793.846246] env[61998]: DEBUG nova.network.neutron [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 793.862027] env[61998]: DEBUG nova.network.neutron [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.048565] env[61998]: DEBUG nova.scheduler.client.report [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 794.139503] env[61998]: DEBUG nova.network.neutron [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.212596] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "d780cbdc-8838-42bf-8736-bc2dd60e659c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.212865] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "d780cbdc-8838-42bf-8736-bc2dd60e659c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.234189] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.234727] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.240481] env[61998]: DEBUG nova.network.neutron [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.365085] env[61998]: DEBUG nova.network.neutron [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.520641] env[61998]: DEBUG nova.compute.manager [req-b454956c-c3d4-4b1b-933e-01c098b42d43 req-2593c32a-6020-4d97-b97f-19ebc16f7d80 service nova] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Received event network-vif-deleted-b8f88e55-67b3-4cb8-80bd-68fd47dc79fa {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 794.554150] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.812s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.554896] 
env[61998]: ERROR nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 83835327-ef7b-48ef-b628-4e777b266394, please check neutron logs for more information. [ 794.554896] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Traceback (most recent call last): [ 794.554896] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 794.554896] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] self.driver.spawn(context, instance, image_meta, [ 794.554896] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 794.554896] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] self._vmops.spawn(context, instance, image_meta, injected_files, [ 794.554896] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 794.554896] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] vm_ref = self.build_virtual_machine(instance, [ 794.554896] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 794.554896] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] vif_infos = vmwarevif.get_vif_info(self._session, [ 794.554896] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] for vif in network_info: [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] return self._sync_wrapper(fn, *args, **kwargs) [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] self.wait() [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] self[:] = self._gt.wait() [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] return self._exit_event.wait() [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] result = hub.switch() [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 794.555213] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] return self.greenlet.switch() [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] result = function(*args, **kwargs) [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] return func(*args, **kwargs) [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] raise e [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] nwinfo = self.network_api.allocate_for_instance( [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] created_port_ids = self._update_ports_for_instance( [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] with excutils.save_and_reraise_exception(): [ 794.555503] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 794.555783] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] self.force_reraise() [ 794.555783] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 794.555783] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] raise self.value [ 794.555783] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 794.555783] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] updated_port = self._update_port( [ 794.555783] env[61998]: ERROR nova.compute.manager [instance: 
3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 794.555783] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] _ensure_no_port_binding_failure(port) [ 794.555783] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 794.555783] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] raise exception.PortBindingFailed(port_id=port['id']) [ 794.555783] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] nova.exception.PortBindingFailed: Binding failed for port 83835327-ef7b-48ef-b628-4e777b266394, please check neutron logs for more information. [ 794.555783] env[61998]: ERROR nova.compute.manager [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] [ 794.556046] env[61998]: DEBUG nova.compute.utils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Binding failed for port 83835327-ef7b-48ef-b628-4e777b266394, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 794.556780] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.838s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.559689] env[61998]: DEBUG nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Build of instance 3f2ed06f-27d0-4a73-a678-430db5334147 was re-scheduled: Binding failed for port 83835327-ef7b-48ef-b628-4e777b266394, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 794.560122] env[61998]: DEBUG nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 794.560452] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquiring lock "refresh_cache-3f2ed06f-27d0-4a73-a678-430db5334147" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.560688] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Acquired lock "refresh_cache-3f2ed06f-27d0-4a73-a678-430db5334147" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.560876] env[61998]: DEBUG nova.network.neutron [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 794.742648] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Releasing lock "refresh_cache-429bcfa3-8bca-42c3-9049-b7ae09438f47" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.743078] env[61998]: DEBUG nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 794.743270] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 794.743560] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1069b9f6-8a37-41a1-b997-62fde25c706a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.754064] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7656cc69-4791-4d6b-bd96-8c32403655a5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.773729] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 429bcfa3-8bca-42c3-9049-b7ae09438f47 could not be found. 
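
Every PortBindingFailed traceback in this log terminates in the same frame, _ensure_no_port_binding_failure at nova/network/neutron.py:294. Below is a minimal, self-contained sketch of that guard, reconstructed from the traceback frames (neutron.py:585 -> neutron.py:294); the 'binding:vif_type' == 'binding_failed' test is an assumption based on upstream Nova and is not itself visible in this log, and the port id is taken from the trace above.

    # Sketch only, assuming upstream Nova semantics: Neutron accepts the
    # port update but records the failed binding on the returned port dict,
    # so Nova must inspect it and raise after the fact.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed constant value

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # Corresponds to the frame at nova/network/neutron.py:294 above.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # A port dict like the one behind the trace above trips the guard:
    try:
        _ensure_no_port_binding_failure(
            {'id': '83835327-ef7b-48ef-b628-4e777b266394',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)

Because the failure only surfaces here, after Neutron has already committed the binding, the compute manager can only abort the resource claim and re-schedule the build, which is the pattern repeated for each failed port in the records that follow.
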
[ 794.773944] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 794.774144] env[61998]: INFO nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Took 0.03 seconds to destroy the instance on the hypervisor. [ 794.774401] env[61998]: DEBUG oslo.service.loopingcall [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 794.774689] env[61998]: DEBUG nova.compute.manager [-] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 794.774791] env[61998]: DEBUG nova.network.neutron [-] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 794.789324] env[61998]: DEBUG nova.network.neutron [-] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.868008] env[61998]: INFO nova.compute.manager [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] [instance: e49e77d6-8d5e-4d89-b129-ac34cd1969c1] Took 1.02 seconds to deallocate network for instance. [ 795.080700] env[61998]: DEBUG nova.network.neutron [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 795.175292] env[61998]: DEBUG nova.network.neutron [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.292286] env[61998]: DEBUG nova.network.neutron [-] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.328018] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50d0785-9636-403c-9a65-5528ebd04abb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.335257] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404f6936-8ea8-4390-93de-27c06830787f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.366361] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf23986-f305-471d-b527-0fd42e171829 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.376522] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241010c5-0c3f-4330-b5bb-2e81694b3bb8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.391885] env[61998]: DEBUG nova.compute.provider_tree [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.680458] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Releasing lock "refresh_cache-3f2ed06f-27d0-4a73-a678-430db5334147" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.680753] env[61998]: DEBUG nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 795.680907] env[61998]: DEBUG nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 795.681085] env[61998]: DEBUG nova.network.neutron [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 795.695667] env[61998]: DEBUG nova.network.neutron [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 795.795359] env[61998]: INFO nova.compute.manager [-] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Took 1.02 seconds to deallocate network for instance. [ 795.797998] env[61998]: DEBUG nova.compute.claims [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 795.798121] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.894135] env[61998]: DEBUG nova.scheduler.client.report [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 795.902837] env[61998]: INFO nova.scheduler.client.report [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Deleted allocations for instance e49e77d6-8d5e-4d89-b129-ac34cd1969c1 [ 796.198407] env[61998]: DEBUG nova.network.neutron [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.399161] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 
tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.842s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.399807] env[61998]: ERROR nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 65c428f0-8266-4930-ba7c-15bf1d6c7bd8, please check neutron logs for more information. [ 796.399807] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Traceback (most recent call last): [ 796.399807] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 796.399807] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] self.driver.spawn(context, instance, image_meta, [ 796.399807] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 796.399807] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] self._vmops.spawn(context, instance, image_meta, injected_files, [ 796.399807] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 796.399807] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] vm_ref = self.build_virtual_machine(instance, [ 796.399807] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 796.399807] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] vif_infos = vmwarevif.get_vif_info(self._session, [ 796.399807] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] for vif in network_info: [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] return self._sync_wrapper(fn, *args, **kwargs) [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] self.wait() [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] self[:] = self._gt.wait() [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] return self._exit_event.wait() [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] current.throw(*self._exc) [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 796.400095] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] result = function(*args, **kwargs) [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] return func(*args, **kwargs) [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] raise e [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] nwinfo = self.network_api.allocate_for_instance( [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] created_port_ids = self._update_ports_for_instance( [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] with excutils.save_and_reraise_exception(): [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] self.force_reraise() [ 796.400548] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 796.400883] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] raise self.value [ 796.400883] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 796.400883] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] updated_port = self._update_port( [ 796.400883] env[61998]: ERROR nova.compute.manager [instance: 
f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 796.400883] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] _ensure_no_port_binding_failure(port) [ 796.400883] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 796.400883] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] raise exception.PortBindingFailed(port_id=port['id']) [ 796.400883] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] nova.exception.PortBindingFailed: Binding failed for port 65c428f0-8266-4930-ba7c-15bf1d6c7bd8, please check neutron logs for more information. [ 796.400883] env[61998]: ERROR nova.compute.manager [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] [ 796.400883] env[61998]: DEBUG nova.compute.utils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Binding failed for port 65c428f0-8266-4930-ba7c-15bf1d6c7bd8, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 796.401755] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.955s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.403291] env[61998]: INFO nova.compute.claims [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 796.405951] env[61998]: DEBUG nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Build of instance f5c91aad-0f8a-4ad3-8566-7f36ff983575 was re-scheduled: Binding failed for port 65c428f0-8266-4930-ba7c-15bf1d6c7bd8, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 796.406376] env[61998]: DEBUG nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 796.406597] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "refresh_cache-f5c91aad-0f8a-4ad3-8566-7f36ff983575" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.406742] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "refresh_cache-f5c91aad-0f8a-4ad3-8566-7f36ff983575" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.406894] env[61998]: DEBUG nova.network.neutron [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 796.410183] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5c89ba28-84c3-4d9b-874c-5ce911f527a4 tempest-ImagesNegativeTestJSON-132255802 tempest-ImagesNegativeTestJSON-132255802-project-member] Lock "e49e77d6-8d5e-4d89-b129-ac34cd1969c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 161.642s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.700693] env[61998]: INFO nova.compute.manager [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] [instance: 3f2ed06f-27d0-4a73-a678-430db5334147] Took 1.02 seconds to deallocate network for instance. [ 796.915432] env[61998]: DEBUG nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 796.947193] env[61998]: DEBUG nova.network.neutron [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.078130] env[61998]: DEBUG nova.network.neutron [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.442401] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.579290] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "refresh_cache-f5c91aad-0f8a-4ad3-8566-7f36ff983575" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.579760] env[61998]: DEBUG nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 797.580020] env[61998]: DEBUG nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 797.580279] env[61998]: DEBUG nova.network.neutron [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 797.600100] env[61998]: DEBUG nova.network.neutron [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.730069] env[61998]: INFO nova.scheduler.client.report [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Deleted allocations for instance 3f2ed06f-27d0-4a73-a678-430db5334147 [ 797.777055] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ebe6884-1cb6-4130-8663-498457c283bf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.785890] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7155e22b-0196-47ca-af27-b1284fd31371 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.821301] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f13f2f1-5b04-401b-a2f7-36b954813eef {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.827400] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d030e4-d618-4d92-bdb8-f2e6bffddc7c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.842893] env[61998]: DEBUG nova.compute.provider_tree [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.102808] env[61998]: DEBUG nova.network.neutron [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.242071] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6304b730-45c1-4be8-abd9-b72ba63b9987 tempest-ServersTestMultiNic-1860368674 tempest-ServersTestMultiNic-1860368674-project-member] Lock "3f2ed06f-27d0-4a73-a678-430db5334147" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 159.497s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.347019] env[61998]: DEBUG nova.scheduler.client.report [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 798.607985] env[61998]: INFO nova.compute.manager [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 
tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: f5c91aad-0f8a-4ad3-8566-7f36ff983575] Took 1.03 seconds to deallocate network for instance. [ 798.745167] env[61998]: DEBUG nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 798.852292] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.450s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.852876] env[61998]: DEBUG nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 798.855572] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.584s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.857074] env[61998]: INFO nova.compute.claims [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 799.273512] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.363204] env[61998]: DEBUG nova.compute.utils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 799.366524] env[61998]: DEBUG nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 799.366753] env[61998]: DEBUG nova.network.neutron [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 799.442780] env[61998]: DEBUG nova.policy [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '264431d91ffa4074ab7e9e6fc562616a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f7b0f9307923448bbd7b245df28f97f1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 799.639395] env[61998]: INFO nova.scheduler.client.report [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted allocations for instance f5c91aad-0f8a-4ad3-8566-7f36ff983575 [ 799.870129] env[61998]: DEBUG nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 799.956096] env[61998]: DEBUG nova.network.neutron [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Successfully created port: bbe3eacb-3a70-427b-acc9-57cc108cd42c {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 800.149022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-78ca3adb-ce12-4185-9d92-9b7a5a098880 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "f5c91aad-0f8a-4ad3-8566-7f36ff983575" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 152.156s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.149022] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3281571f-ec0e-4cac-81ee-7b6741c27662 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.157542] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee08f59-b2b7-40a0-b3a9-9d4cff2b7634 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.192269] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6997b3-c35e-4043-a557-a0361f23b9fd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.201192] env[61998]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10ef536-eccb-45fe-84cc-6b38bde52714 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.217500] env[61998]: DEBUG nova.compute.provider_tree [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.660030] env[61998]: DEBUG nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 800.720618] env[61998]: DEBUG nova.scheduler.client.report [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 800.869105] env[61998]: DEBUG nova.compute.manager [req-473fa162-ce57-4652-b392-d3d7197a749e req-030090fe-1c23-49d3-8957-521d480d305c service nova] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Received event network-changed-bbe3eacb-3a70-427b-acc9-57cc108cd42c {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 800.869105] env[61998]: DEBUG nova.compute.manager [req-473fa162-ce57-4652-b392-d3d7197a749e req-030090fe-1c23-49d3-8957-521d480d305c service nova] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Refreshing instance network info cache due to event network-changed-bbe3eacb-3a70-427b-acc9-57cc108cd42c. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 800.869105] env[61998]: DEBUG oslo_concurrency.lockutils [req-473fa162-ce57-4652-b392-d3d7197a749e req-030090fe-1c23-49d3-8957-521d480d305c service nova] Acquiring lock "refresh_cache-2963f997-eb4c-4bfd-be28-6c1b383598c6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.869105] env[61998]: DEBUG oslo_concurrency.lockutils [req-473fa162-ce57-4652-b392-d3d7197a749e req-030090fe-1c23-49d3-8957-521d480d305c service nova] Acquired lock "refresh_cache-2963f997-eb4c-4bfd-be28-6c1b383598c6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.869548] env[61998]: DEBUG nova.network.neutron [req-473fa162-ce57-4652-b392-d3d7197a749e req-030090fe-1c23-49d3-8957-521d480d305c service nova] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Refreshing network info cache for port bbe3eacb-3a70-427b-acc9-57cc108cd42c {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 800.883287] env[61998]: DEBUG nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 800.911788] env[61998]: DEBUG nova.virt.hardware [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 800.912030] env[61998]: DEBUG nova.virt.hardware [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 800.912185] env[61998]: DEBUG nova.virt.hardware [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.912364] env[61998]: DEBUG nova.virt.hardware [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 800.912507] env[61998]: DEBUG nova.virt.hardware [None 
req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.912690] env[61998]: DEBUG nova.virt.hardware [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 800.912909] env[61998]: DEBUG nova.virt.hardware [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 800.913334] env[61998]: DEBUG nova.virt.hardware [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 800.913715] env[61998]: DEBUG nova.virt.hardware [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 800.913916] env[61998]: DEBUG nova.virt.hardware [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 800.914138] env[61998]: DEBUG nova.virt.hardware [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 800.915012] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0aa0a9-91e7-446a-83d9-501da8450bb3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.927078] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a34321-e817-4da2-8c6e-a1b651f64c8c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.158592] env[61998]: ERROR nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bbe3eacb-3a70-427b-acc9-57cc108cd42c, please check neutron logs for more information. 
[ 801.158592] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 801.158592] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 801.158592] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 801.158592] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 801.158592] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 801.158592] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 801.158592] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 801.158592] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.158592] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 801.158592] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.158592] env[61998]: ERROR nova.compute.manager raise self.value [ 801.158592] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 801.158592] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 801.158592] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 801.158592] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 801.158996] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 801.158996] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 801.158996] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bbe3eacb-3a70-427b-acc9-57cc108cd42c, please check neutron logs for more information. 
[ 801.158996] env[61998]: ERROR nova.compute.manager [ 801.158996] env[61998]: Traceback (most recent call last): [ 801.158996] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 801.158996] env[61998]: listener.cb(fileno) [ 801.158996] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 801.158996] env[61998]: result = function(*args, **kwargs) [ 801.158996] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 801.158996] env[61998]: return func(*args, **kwargs) [ 801.158996] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 801.158996] env[61998]: raise e [ 801.158996] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 801.158996] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 801.158996] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 801.158996] env[61998]: created_port_ids = self._update_ports_for_instance( [ 801.158996] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 801.158996] env[61998]: with excutils.save_and_reraise_exception(): [ 801.158996] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.158996] env[61998]: self.force_reraise() [ 801.158996] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.158996] env[61998]: raise self.value [ 801.158996] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 801.158996] env[61998]: updated_port = self._update_port( [ 801.158996] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 801.158996] env[61998]: _ensure_no_port_binding_failure(port) [ 801.158996] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 801.158996] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 801.159651] env[61998]: nova.exception.PortBindingFailed: Binding failed for port bbe3eacb-3a70-427b-acc9-57cc108cd42c, please check neutron logs for more information. [ 801.159651] env[61998]: Removing descriptor: 17 [ 801.159651] env[61998]: ERROR nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bbe3eacb-3a70-427b-acc9-57cc108cd42c, please check neutron logs for more information. 
[ 801.159651] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Traceback (most recent call last): [ 801.159651] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 801.159651] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] yield resources [ 801.159651] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 801.159651] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] self.driver.spawn(context, instance, image_meta, [ 801.159651] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 801.159651] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 801.159651] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 801.159651] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] vm_ref = self.build_virtual_machine(instance, [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] vif_infos = vmwarevif.get_vif_info(self._session, [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] for vif in network_info: [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] return self._sync_wrapper(fn, *args, **kwargs) [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] self.wait() [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] self[:] = self._gt.wait() [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] return self._exit_event.wait() [ 801.159941] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 801.160248] env[61998]: ERROR 
nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] result = hub.switch() [ 801.160248] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 801.160248] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] return self.greenlet.switch() [ 801.160248] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 801.160248] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] result = function(*args, **kwargs) [ 801.160248] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 801.160248] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] return func(*args, **kwargs) [ 801.160248] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 801.160248] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] raise e [ 801.160248] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 801.160248] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] nwinfo = self.network_api.allocate_for_instance( [ 801.160248] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 801.160248] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] created_port_ids = self._update_ports_for_instance( [ 801.160589] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 801.160589] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] with excutils.save_and_reraise_exception(): [ 801.160589] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.160589] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] self.force_reraise() [ 801.160589] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.160589] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] raise self.value [ 801.160589] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 801.160589] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] updated_port = self._update_port( [ 801.160589] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 801.160589] 
env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] _ensure_no_port_binding_failure(port) [ 801.160589] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 801.160589] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] raise exception.PortBindingFailed(port_id=port['id']) [ 801.160871] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] nova.exception.PortBindingFailed: Binding failed for port bbe3eacb-3a70-427b-acc9-57cc108cd42c, please check neutron logs for more information. [ 801.160871] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] [ 801.160871] env[61998]: INFO nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Terminating instance [ 801.165763] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "refresh_cache-2963f997-eb4c-4bfd-be28-6c1b383598c6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.189058] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.226344] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.232573] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.773s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.406366] env[61998]: DEBUG nova.network.neutron [req-473fa162-ce57-4652-b392-d3d7197a749e req-030090fe-1c23-49d3-8957-521d480d305c service nova] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.510128] env[61998]: DEBUG nova.network.neutron [req-473fa162-ce57-4652-b392-d3d7197a749e req-030090fe-1c23-49d3-8957-521d480d305c service nova] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.738556] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Acquiring lock "443fb62b-47e0-4fa4-a0db-61d744375556" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.738556] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Lock "443fb62b-47e0-4fa4-a0db-61d744375556" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.014238] env[61998]: DEBUG oslo_concurrency.lockutils [req-473fa162-ce57-4652-b392-d3d7197a749e req-030090fe-1c23-49d3-8957-521d480d305c service nova] Releasing lock "refresh_cache-2963f997-eb4c-4bfd-be28-6c1b383598c6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.014917] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquired lock "refresh_cache-2963f997-eb4c-4bfd-be28-6c1b383598c6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.015120] env[61998]: DEBUG nova.network.neutron [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.025483] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9de289a-d8b4-46a4-aba2-27fe049c6cad {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.033657] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d642d5fa-571e-4cd4-bbbc-9c4ba10d2c7e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.063327] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ab94ae-a704-49fd-8c0b-8c7a8939e1bd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.070414] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e061b6b3-0338-46fb-bd9b-a23c5cb34794 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.083935] env[61998]: DEBUG 
nova.compute.provider_tree [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.243627] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Lock "443fb62b-47e0-4fa4-a0db-61d744375556" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.505s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.243627] env[61998]: DEBUG nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 802.436672] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "ac4a8463-91ba-4061-aa5d-1c72c4f532ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.436909] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "ac4a8463-91ba-4061-aa5d-1c72c4f532ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.534995] env[61998]: DEBUG nova.network.neutron [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.587614] env[61998]: DEBUG nova.scheduler.client.report [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 802.663158] env[61998]: DEBUG nova.network.neutron [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.747999] env[61998]: DEBUG nova.compute.utils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 802.750084] env[61998]: DEBUG nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 802.750290] env[61998]: DEBUG nova.network.neutron [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 802.959738] env[61998]: DEBUG nova.compute.manager [req-8424cc2b-c78a-4fb3-82f8-f192c528cf8b req-fd960582-4bed-4fee-a4fb-b0e13fcff11c service nova] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Received event network-vif-deleted-bbe3eacb-3a70-427b-acc9-57cc108cd42c {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 802.998393] env[61998]: DEBUG nova.policy [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17ab3d573161409eac02aaae814f9817', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad99b62a40754715a52fa9d366f2c5df', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 803.093962] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.865s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.097994] env[61998]: ERROR nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c4010d42-4f0e-46d7-9d8a-5f23f8659a44, please check neutron logs for more information. 
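Note on the repeated PortBindingFailed errors in this section, including the bd0d07d8 failure whose traceback follows: all of them are raised by the same helper visible in the stack frames. After creating or updating a port, nova/network/neutron.py inspects the binding:vif_type value Neutron returned, because Neutron reports a failed binding through that port attribute rather than through an API error. A minimal sketch of that check, simplified from the _ensure_no_port_binding_failure frame in the tracebacks:

    # Simplified sketch of the check that raises PortBindingFailed above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check '
                             'neutron logs for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron signals a failed binding via the port's
        # binding:vif_type attribute, not via an HTTP error.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])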
[ 803.097994] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Traceback (most recent call last): [ 803.097994] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 803.097994] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] self.driver.spawn(context, instance, image_meta, [ 803.097994] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 803.097994] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 803.097994] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 803.097994] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] vm_ref = self.build_virtual_machine(instance, [ 803.097994] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 803.097994] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] vif_infos = vmwarevif.get_vif_info(self._session, [ 803.097994] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] for vif in network_info: [ 803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] return self._sync_wrapper(fn, *args, **kwargs) [ 803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] self.wait() [ 803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] self[:] = self._gt.wait() [ 803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] return self._exit_event.wait() [ 803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] current.throw(*self._exc) [ 803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
803.098315] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] result = function(*args, **kwargs) [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] return func(*args, **kwargs) [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] raise e [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] nwinfo = self.network_api.allocate_for_instance( [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] created_port_ids = self._update_ports_for_instance( [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] with excutils.save_and_reraise_exception(): [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] self.force_reraise() [ 803.098595] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.098879] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] raise self.value [ 803.098879] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 803.098879] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] updated_port = self._update_port( [ 803.098879] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.098879] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] _ensure_no_port_binding_failure(port) [ 803.098879] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.098879] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] raise exception.PortBindingFailed(port_id=port['id']) [ 803.098879] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] nova.exception.PortBindingFailed: Binding failed for 
port c4010d42-4f0e-46d7-9d8a-5f23f8659a44, please check neutron logs for more information. [ 803.098879] env[61998]: ERROR nova.compute.manager [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] [ 803.098879] env[61998]: DEBUG nova.compute.utils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Binding failed for port c4010d42-4f0e-46d7-9d8a-5f23f8659a44, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 803.099990] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.759s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.102997] env[61998]: DEBUG nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Build of instance bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5 was re-scheduled: Binding failed for port c4010d42-4f0e-46d7-9d8a-5f23f8659a44, please check neutron logs for more information. {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 803.103420] env[61998]: DEBUG nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 803.103682] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquiring lock "refresh_cache-bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.103842] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquired lock "refresh_cache-bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.104009] env[61998]: DEBUG nova.network.neutron [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 803.166539] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Releasing lock "refresh_cache-2963f997-eb4c-4bfd-be28-6c1b383598c6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
803.166539] env[61998]: DEBUG nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 803.166662] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 803.167135] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-512b5013-9110-4d44-ad8c-4ecf51313b16 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.176037] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a384892e-6f16-4367-9359-267059aedfee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.210626] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2963f997-eb4c-4bfd-be28-6c1b383598c6 could not be found. [ 803.210626] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.210626] env[61998]: INFO nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 803.210626] env[61998]: DEBUG oslo.service.loopingcall [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 803.210888] env[61998]: DEBUG nova.compute.manager [-] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 803.211731] env[61998]: DEBUG nova.network.neutron [-] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 803.232564] env[61998]: DEBUG nova.network.neutron [-] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.251295] env[61998]: DEBUG nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 803.495832] env[61998]: DEBUG nova.network.neutron [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Successfully created port: 3e49bdec-f837-41fb-a086-6e21c3fcbe48 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.625422] env[61998]: DEBUG nova.network.neutron [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.707021] env[61998]: DEBUG nova.network.neutron [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.737921] env[61998]: DEBUG nova.network.neutron [-] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.877910] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f63f71-30b1-43e5-a11d-31a1c0a6ee99 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.892124] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c0541a-d983-438e-850f-9bc43bf6c2d4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.932642] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7151ad-0415-4de3-bcb3-9898ba2528ba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.940549] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-619819cf-52e8-4eab-8922-a691118c602b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.954657] env[61998]: DEBUG nova.compute.provider_tree [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.212237] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 
tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Releasing lock "refresh_cache-bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.212495] env[61998]: DEBUG nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 804.212747] env[61998]: DEBUG nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 804.212931] env[61998]: DEBUG nova.network.neutron [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 804.239658] env[61998]: DEBUG nova.network.neutron [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 804.241944] env[61998]: INFO nova.compute.manager [-] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Took 1.03 seconds to deallocate network for instance. [ 804.246651] env[61998]: DEBUG nova.compute.claims [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 804.246829] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.262394] env[61998]: DEBUG nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 804.299804] env[61998]: DEBUG nova.virt.hardware [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 804.300117] env[61998]: DEBUG nova.virt.hardware [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 804.300312] env[61998]: DEBUG nova.virt.hardware [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.300495] env[61998]: DEBUG nova.virt.hardware [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 804.300810] env[61998]: DEBUG nova.virt.hardware [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 804.300870] env[61998]: DEBUG nova.virt.hardware [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 804.301123] env[61998]: DEBUG nova.virt.hardware [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 804.301285] env[61998]: DEBUG nova.virt.hardware [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 804.301450] env[61998]: DEBUG nova.virt.hardware [None req-a98a517b-69fd-4d34-949e-702010fe5202 
tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 804.301605] env[61998]: DEBUG nova.virt.hardware [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 804.301793] env[61998]: DEBUG nova.virt.hardware [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 804.304870] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2917f7-076d-49aa-8c23-010f3f71ad7e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.311881] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7e3c64-0fc5-4e59-9998-23ca7dabdc5a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.460019] env[61998]: DEBUG nova.scheduler.client.report [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 804.745377] env[61998]: DEBUG nova.network.neutron [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.954952] env[61998]: ERROR nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3e49bdec-f837-41fb-a086-6e21c3fcbe48, please check neutron logs for more information. 
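The "failed network setup after 1 attempt(s)" message above, whose traceback follows, comes from Nova's asynchronous allocation path, which retries allocate_for_instance up to a configured number of times and re-raises the last failure. A rough sketch of that control flow; the retries parameter and its default here are assumptions standing in for Nova's configuration:

    # Rough sketch of the retry loop behind "after 1 attempt(s)".
    # retries=0 (a single attempt) matches the log above; the real
    # limit comes from Nova configuration.
    def _allocate_network_async(network_api, context, instance, retries=0):
        attempts = retries + 1
        for attempt in range(1, attempts + 1):
            try:
                return network_api.allocate_for_instance(context, instance)
            except Exception as exc:
                if attempt == attempts:
                    # Surfaces as "Instance failed network setup after
                    # N attempt(s)", then re-raises ("raise e" in the stacks).
                    raise exc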
[ 804.954952] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 804.954952] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 804.954952] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 804.954952] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.954952] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 804.954952] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.954952] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 804.954952] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.954952] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 804.954952] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.954952] env[61998]: ERROR nova.compute.manager raise self.value [ 804.954952] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.954952] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 804.954952] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.954952] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 804.955585] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.955585] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 804.955585] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3e49bdec-f837-41fb-a086-6e21c3fcbe48, please check neutron logs for more information. 
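Note the shape these stacks share: the port update fails on a background greenthread, but the exception only surfaces once the driver iterates network_info and the model's _sync_wrapper calls wait() on that greenthread, which is why eventlet's hub.switch() and greenthread frames appear inside a compute-manager traceback (the raw greenthread-side traceback follows below). A condensed, self-contained illustration of this deferred-failure pattern; the class and function names are invented for the example:

    # Deferred failure via eventlet, as in the tracebacks above: the worker
    # fails early, but the caller only sees the error on first use.
    import eventlet

    def allocate():
        raise RuntimeError('binding failed')  # stand-in for PortBindingFailed

    class AsyncNetworkInfo(list):
        def __init__(self, fn):
            super().__init__()
            self._gt = eventlet.spawn(fn)  # schedule the work on a greenthread

        def wait(self):
            # GreenThread.wait() re-raises whatever the worker raised,
            # which is why hub.switch() shows up mid-traceback.
            self[:] = self._gt.wait()

        def __iter__(self):
            self.wait()  # first iteration triggers the stored failure
            return super().__iter__()

    network_info = AsyncNetworkInfo(allocate)
    # Iterating raises here, far from where allocate() actually failed:
    # for vif in network_info: ...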
[ 804.955585] env[61998]: ERROR nova.compute.manager [ 804.955585] env[61998]: Traceback (most recent call last): [ 804.955585] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 804.955585] env[61998]: listener.cb(fileno) [ 804.955585] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 804.955585] env[61998]: result = function(*args, **kwargs) [ 804.955585] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 804.955585] env[61998]: return func(*args, **kwargs) [ 804.955585] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 804.955585] env[61998]: raise e [ 804.955585] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 804.955585] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 804.955585] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.955585] env[61998]: created_port_ids = self._update_ports_for_instance( [ 804.955585] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.955585] env[61998]: with excutils.save_and_reraise_exception(): [ 804.955585] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.955585] env[61998]: self.force_reraise() [ 804.955585] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.955585] env[61998]: raise self.value [ 804.955585] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.955585] env[61998]: updated_port = self._update_port( [ 804.955585] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.955585] env[61998]: _ensure_no_port_binding_failure(port) [ 804.955585] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.955585] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 804.956346] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 3e49bdec-f837-41fb-a086-6e21c3fcbe48, please check neutron logs for more information. [ 804.956346] env[61998]: Removing descriptor: 17 [ 804.956346] env[61998]: ERROR nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3e49bdec-f837-41fb-a086-6e21c3fcbe48, please check neutron logs for more information. 
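The context_wrapper frame in these stacks (nova/utils.py) exists because Nova spawns worker greenthreads with the caller's RequestContext re-attached, so log lines emitted from the worker keep the originating request ID. A sketch of that wrapper under stated assumptions; the spawn helper shown is illustrative, with oslo.context's get_current() and update_store() doing the re-attachment:

    # Sketch of the context_wrapper pattern visible in the tracebacks.
    import functools
    import eventlet
    from oslo_context import context as oslo_context

    def spawn(func, *args, **kwargs):
        ctxt = oslo_context.get_current()  # RequestContext of the caller

        @functools.wraps(func)
        def context_wrapper(*wargs, **wkwargs):
            if ctxt is not None:
                ctxt.update_store()  # re-attach to this greenthread's store
            return func(*wargs, **wkwargs)

        return eventlet.spawn(context_wrapper, *args, **kwargs)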
[ 804.956346] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] Traceback (most recent call last): [ 804.956346] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 804.956346] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] yield resources [ 804.956346] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 804.956346] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] self.driver.spawn(context, instance, image_meta, [ 804.956346] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 804.956346] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] self._vmops.spawn(context, instance, image_meta, injected_files, [ 804.956346] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 804.956346] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] vm_ref = self.build_virtual_machine(instance, [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] vif_infos = vmwarevif.get_vif_info(self._session, [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] for vif in network_info: [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] return self._sync_wrapper(fn, *args, **kwargs) [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] self.wait() [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] self[:] = self._gt.wait() [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] return self._exit_event.wait() [ 804.956615] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 804.957037] env[61998]: ERROR 
nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] result = hub.switch() [ 804.957037] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 804.957037] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] return self.greenlet.switch() [ 804.957037] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 804.957037] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] result = function(*args, **kwargs) [ 804.957037] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 804.957037] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] return func(*args, **kwargs) [ 804.957037] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 804.957037] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] raise e [ 804.957037] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 804.957037] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] nwinfo = self.network_api.allocate_for_instance( [ 804.957037] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.957037] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] created_port_ids = self._update_ports_for_instance( [ 804.957328] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.957328] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] with excutils.save_and_reraise_exception(): [ 804.957328] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.957328] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] self.force_reraise() [ 804.957328] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.957328] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] raise self.value [ 804.957328] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.957328] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] updated_port = self._update_port( [ 804.957328] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.957328] 
env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] _ensure_no_port_binding_failure(port) [ 804.957328] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.957328] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] raise exception.PortBindingFailed(port_id=port['id']) [ 804.957600] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] nova.exception.PortBindingFailed: Binding failed for port 3e49bdec-f837-41fb-a086-6e21c3fcbe48, please check neutron logs for more information. [ 804.957600] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] [ 804.957600] env[61998]: INFO nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Terminating instance [ 804.959042] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Acquiring lock "refresh_cache-08e60642-0784-4898-9de5-444a24fba508" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.959501] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Acquired lock "refresh_cache-08e60642-0784-4898-9de5-444a24fba508" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.959568] env[61998]: DEBUG nova.network.neutron [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 804.962521] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.863s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.963152] env[61998]: ERROR nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0, please check neutron logs for more information. 
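The Acquiring/acquired/released lines threaded through this section come from oslo.concurrency's lockutils, which logs how long each caller waited for and then held a named lock (for example, "compute_resources" held 1.863s above). A minimal usage sketch; the decorator mirrors the nova-prefixed naming seen in the log, and the method body is a placeholder:

    # Minimal sketch of the lock pattern behind the
    # 'Lock "compute_resources" acquired ... waited N.NNNs' DEBUG lines.
    from oslo_concurrency import lockutils

    synchronized = lockutils.synchronized_with_prefix('nova-')

    class ResourceTracker:
        @synchronized('compute_resources')
        def abort_instance_claim(self, context, instance, nodename):
            # Placeholder body: the real tracker returns the instance's
            # claimed CPU/RAM/disk to the pool while holding this lock.
            pass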
[ 804.963152] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Traceback (most recent call last): [ 804.963152] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 804.963152] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] self.driver.spawn(context, instance, image_meta, [ 804.963152] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 804.963152] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] self._vmops.spawn(context, instance, image_meta, injected_files, [ 804.963152] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 804.963152] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] vm_ref = self.build_virtual_machine(instance, [ 804.963152] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 804.963152] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] vif_infos = vmwarevif.get_vif_info(self._session, [ 804.963152] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] for vif in network_info: [ 804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] return self._sync_wrapper(fn, *args, **kwargs) [ 804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] self.wait() [ 804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] self[:] = self._gt.wait() [ 804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] return self._exit_event.wait() [ 804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] current.throw(*self._exc) [ 804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
804.964205] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] result = function(*args, **kwargs) [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] return func(*args, **kwargs) [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] raise e [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] nwinfo = self.network_api.allocate_for_instance( [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] created_port_ids = self._update_ports_for_instance( [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] with excutils.save_and_reraise_exception(): [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] self.force_reraise() [ 804.964669] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.965025] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] raise self.value [ 804.965025] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.965025] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] updated_port = self._update_port( [ 804.965025] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.965025] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] _ensure_no_port_binding_failure(port) [ 804.965025] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.965025] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] raise exception.PortBindingFailed(port_id=port['id']) [ 804.965025] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] nova.exception.PortBindingFailed: Binding failed for 
port 7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0, please check neutron logs for more information. [ 804.965025] env[61998]: ERROR nova.compute.manager [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] [ 804.965025] env[61998]: DEBUG nova.compute.utils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Binding failed for port 7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 804.966679] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.473s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.969977] env[61998]: INFO nova.compute.claims [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 804.973756] env[61998]: DEBUG nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Build of instance ad1084f8-a0c9-4783-af2d-aa677116a451 was re-scheduled: Binding failed for port 7d675ffe-d6b8-4f00-8596-3f1beb4f9ca0, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 804.974198] env[61998]: DEBUG nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 804.974417] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquiring lock "refresh_cache-ad1084f8-a0c9-4783-af2d-aa677116a451" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.974719] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Acquired lock "refresh_cache-ad1084f8-a0c9-4783-af2d-aa677116a451" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.974719] env[61998]: DEBUG nova.network.neutron [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.002230] env[61998]: DEBUG nova.compute.manager [req-d7c16b21-1f44-4f9e-83c4-dd00ff80fcc9 req-fc9d57b0-5f7b-4eb4-895e-58056e45014c service nova] [instance: 08e60642-0784-4898-9de5-444a24fba508] Received event network-changed-3e49bdec-f837-41fb-a086-6e21c3fcbe48 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 805.002297] env[61998]: DEBUG nova.compute.manager [req-d7c16b21-1f44-4f9e-83c4-dd00ff80fcc9 req-fc9d57b0-5f7b-4eb4-895e-58056e45014c service nova] [instance: 08e60642-0784-4898-9de5-444a24fba508] Refreshing instance network info cache due to event network-changed-3e49bdec-f837-41fb-a086-6e21c3fcbe48. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 805.003234] env[61998]: DEBUG oslo_concurrency.lockutils [req-d7c16b21-1f44-4f9e-83c4-dd00ff80fcc9 req-fc9d57b0-5f7b-4eb4-895e-58056e45014c service nova] Acquiring lock "refresh_cache-08e60642-0784-4898-9de5-444a24fba508" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.247972] env[61998]: INFO nova.compute.manager [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5] Took 1.03 seconds to deallocate network for instance. [ 805.477494] env[61998]: DEBUG nova.network.neutron [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.566660] env[61998]: DEBUG nova.network.neutron [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.661885] env[61998]: DEBUG nova.network.neutron [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.877394] env[61998]: DEBUG nova.network.neutron [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.071442] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Releasing lock "refresh_cache-08e60642-0784-4898-9de5-444a24fba508" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.071442] env[61998]: DEBUG nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 806.071726] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 806.072087] env[61998]: DEBUG oslo_concurrency.lockutils [req-d7c16b21-1f44-4f9e-83c4-dd00ff80fcc9 req-fc9d57b0-5f7b-4eb4-895e-58056e45014c service nova] Acquired lock "refresh_cache-08e60642-0784-4898-9de5-444a24fba508" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.072087] env[61998]: DEBUG nova.network.neutron [req-d7c16b21-1f44-4f9e-83c4-dd00ff80fcc9 req-fc9d57b0-5f7b-4eb4-895e-58056e45014c service nova] [instance: 08e60642-0784-4898-9de5-444a24fba508] Refreshing network info cache for port 3e49bdec-f837-41fb-a086-6e21c3fcbe48 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 806.078334] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd887c44-a62e-427c-bf05-5341fd66204d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.098354] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1da99a-19bb-4aa8-a15c-a43808f6c47a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.124136] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 08e60642-0784-4898-9de5-444a24fba508 could not be found. [ 806.124376] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 806.126399] env[61998]: INFO nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Took 0.05 seconds to destroy the instance on the hypervisor. [ 806.126399] env[61998]: DEBUG oslo.service.loopingcall [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.126399] env[61998]: DEBUG nova.compute.manager [-] [instance: 08e60642-0784-4898-9de5-444a24fba508] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 806.126399] env[61998]: DEBUG nova.network.neutron [-] [instance: 08e60642-0784-4898-9de5-444a24fba508] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 806.156441] env[61998]: DEBUG nova.network.neutron [-] [instance: 08e60642-0784-4898-9de5-444a24fba508] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.279050] env[61998]: INFO nova.scheduler.client.report [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Deleted allocations for instance bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5 [ 806.334221] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bffc03d-02cb-4873-a04b-31ca78fb486c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.342783] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b96910b-28b7-4846-b228-f1a5b6fe74b2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.380424] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e2f468-95c5-4a3c-8fb3-24e7b5bc6dd3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.384916] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Releasing lock "refresh_cache-ad1084f8-a0c9-4783-af2d-aa677116a451" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.385186] env[61998]: DEBUG nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 806.385373] env[61998]: DEBUG nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 806.385536] env[61998]: DEBUG nova.network.neutron [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 806.392978] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2f319d-f984-49df-944a-01a8bd74d94d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.406739] env[61998]: DEBUG nova.compute.provider_tree [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.423020] env[61998]: DEBUG nova.network.neutron [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.612789] env[61998]: DEBUG nova.network.neutron [req-d7c16b21-1f44-4f9e-83c4-dd00ff80fcc9 req-fc9d57b0-5f7b-4eb4-895e-58056e45014c service nova] [instance: 08e60642-0784-4898-9de5-444a24fba508] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.659135] env[61998]: DEBUG nova.network.neutron [-] [instance: 08e60642-0784-4898-9de5-444a24fba508] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.790840] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fa44fb15-5273-453b-a88e-6fedcf1a8752 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Lock "bd0d07d8-cbdb-43b4-9b6c-9aedb64a5fe5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 139.628s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.808775] env[61998]: DEBUG nova.network.neutron [req-d7c16b21-1f44-4f9e-83c4-dd00ff80fcc9 req-fc9d57b0-5f7b-4eb4-895e-58056e45014c service nova] [instance: 08e60642-0784-4898-9de5-444a24fba508] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.909600] env[61998]: DEBUG nova.scheduler.client.report [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 806.925916] env[61998]: DEBUG nova.network.neutron [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.162870] env[61998]: INFO nova.compute.manager [-] [instance: 08e60642-0784-4898-9de5-444a24fba508] Took 1.04 seconds to deallocate network for instance. [ 807.166093] env[61998]: DEBUG nova.compute.claims [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 807.169574] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.294594] env[61998]: DEBUG nova.compute.manager [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 807.313352] env[61998]: DEBUG oslo_concurrency.lockutils [req-d7c16b21-1f44-4f9e-83c4-dd00ff80fcc9 req-fc9d57b0-5f7b-4eb4-895e-58056e45014c service nova] Releasing lock "refresh_cache-08e60642-0784-4898-9de5-444a24fba508" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.313352] env[61998]: DEBUG nova.compute.manager [req-d7c16b21-1f44-4f9e-83c4-dd00ff80fcc9 req-fc9d57b0-5f7b-4eb4-895e-58056e45014c service nova] [instance: 08e60642-0784-4898-9de5-444a24fba508] Received event network-vif-deleted-3e49bdec-f837-41fb-a086-6e21c3fcbe48 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 807.419201] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.449s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.419201] env[61998]: DEBUG nova.compute.manager [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 807.419914] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.097s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.423160] env[61998]: INFO nova.compute.claims [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 807.433567] env[61998]: INFO nova.compute.manager [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] [instance: ad1084f8-a0c9-4783-af2d-aa677116a451] Took 1.04 seconds to deallocate network for instance. 
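[editor's note] The recurring 'Acquiring lock ...', 'acquired ... waited Ns', and '"released" ... held Ns' entries throughout this section are emitted by oslo.concurrency's lockutils wrapper around critical sections such as ResourceTracker.instance_claim. A minimal sketch of how such a section is typically guarded; the function name and body are illustrative, not Nova's actual code:

    # Guarding a critical section with oslo.concurrency; lockutils
    # logs how long the caller waited for the named lock and how
    # long it was held, which is exactly what the entries above show.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(context, instance):
        # runs with the 'compute_resources' lock held
        pass

    # The same helper is available as a context manager, e.g. for the
    # per-instance cache locks seen above:
    #     with lockutils.lock('refresh_cache-%s' % instance_uuid):
    #         ...
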
[ 807.819165] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.927994] env[61998]: DEBUG nova.compute.utils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 807.931723] env[61998]: DEBUG nova.compute.manager [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Not allocating networking since 'none' was specified. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 808.433119] env[61998]: DEBUG nova.compute.manager [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 808.473693] env[61998]: INFO nova.scheduler.client.report [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Deleted allocations for instance ad1084f8-a0c9-4783-af2d-aa677116a451 [ 808.693851] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96b89bb-cfce-450e-a5c0-0957c9483637 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.704728] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e8f204-49e9-47f3-a8a0-ff840ada0432 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.736804] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa0c190-21de-441b-abbf-5d5eb8cf17b5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.744385] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e00ea5-5ea6-468e-8b59-52f71cb2be5f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.759096] env[61998]: DEBUG nova.compute.provider_tree [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.985596] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cdbaaf25-470b-4388-addc-e4b5bc8edeb2 tempest-ServerRescueNegativeTestJSON-1473706676 tempest-ServerRescueNegativeTestJSON-1473706676-project-member] Lock "ad1084f8-a0c9-4783-af2d-aa677116a451" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.335s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.104820] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Acquiring lock "bcb05670-dc58-46be-a4a9-58a260e4132f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.108230] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Lock "bcb05670-dc58-46be-a4a9-58a260e4132f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.264754] env[61998]: DEBUG nova.scheduler.client.report [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 809.390116] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Acquiring lock "f3089d53-9c8f-4276-8e2e-0518cf29004b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.390535] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Lock "f3089d53-9c8f-4276-8e2e-0518cf29004b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.452233] env[61998]: DEBUG nova.compute.manager [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 809.492094] env[61998]: DEBUG nova.compute.manager [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 809.496034] env[61998]: DEBUG nova.virt.hardware [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 809.496034] env[61998]: DEBUG nova.virt.hardware [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 809.496034] env[61998]: DEBUG nova.virt.hardware [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 809.496034] env[61998]: DEBUG nova.virt.hardware [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 809.496276] env[61998]: DEBUG nova.virt.hardware [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 809.496276] env[61998]: DEBUG nova.virt.hardware [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 809.496276] env[61998]: DEBUG nova.virt.hardware [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 809.496276] env[61998]: DEBUG nova.virt.hardware [None 
req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 809.496276] env[61998]: DEBUG nova.virt.hardware [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 809.496481] env[61998]: DEBUG nova.virt.hardware [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 809.496481] env[61998]: DEBUG nova.virt.hardware [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 809.497197] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd7cdd9-57ab-402f-b332-e67c4fabf764 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.506055] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adc3a4e-5427-4395-abab-c8625a650455 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.524929] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Instance VIF info [] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 809.532100] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Creating folder: Project (7b91b70087d74f04b43da65cc8cd258b). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 809.532100] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-accc3a1c-b087-4656-9395-af32945e98e5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.543728] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Created folder: Project (7b91b70087d74f04b43da65cc8cd258b) in parent group-v294665. [ 809.543728] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Creating folder: Instances. Parent ref: group-v294689. 
{{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 809.543728] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be434846-d068-4f7d-8a29-cf7a7b466d87 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.553901] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Created folder: Instances in parent group-v294689. [ 809.554131] env[61998]: DEBUG oslo.service.loopingcall [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 809.554353] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 809.554602] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87ab1b2f-e345-4f8d-bd84-0729dc8eb4fb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.571958] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 809.571958] env[61998]: value = "task-1388457" [ 809.571958] env[61998]: _type = "Task" [ 809.571958] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.580285] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388457, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.770377] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.770999] env[61998]: DEBUG nova.compute.manager [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 809.774309] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.976s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.029219] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.081739] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388457, 'name': CreateVM_Task, 'duration_secs': 0.275653} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.081913] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 810.082344] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.082502] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.082859] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 810.083118] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4337f17-0591-469d-a0ad-0d224861ed6b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.087379] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){ [ 810.087379] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]522a9c9b-0977-bb82-80a2-c95313138ee9" [ 810.087379] env[61998]: _type = "Task" [ 810.087379] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.094624] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]522a9c9b-0977-bb82-80a2-c95313138ee9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.276366] env[61998]: DEBUG nova.compute.utils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 810.277818] env[61998]: DEBUG nova.compute.manager [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Not allocating networking since 'none' was specified. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 810.575807] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2cf7d9-05b3-4b77-94bc-101926dbeefb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.586787] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429d046b-bbf7-4165-8888-8eaf0c25a36a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.598796] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]522a9c9b-0977-bb82-80a2-c95313138ee9, 'name': SearchDatastore_Task, 'duration_secs': 0.010424} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.622090] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.622365] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 810.622604] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.622750] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.622964] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 810.623409] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43ec4056-2360-4fdf-b6b0-daf37ea00fbf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.627957] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91047b77-43a3-48d5-9fe5-52b3c97c2aac {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.633866] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240fdc8c-1556-44d2-8151-7d32bd851545 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.640048] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 810.640048] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 810.640048] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7adb07b-634d-4af7-9aac-7fe567d433a9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.649468] env[61998]: DEBUG nova.compute.provider_tree [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.653524] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){ [ 810.653524] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52492aaa-9bc2-21e5-82fd-caac457dbf15" [ 810.653524] env[61998]: _type = "Task" [ 810.653524] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.661029] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52492aaa-9bc2-21e5-82fd-caac457dbf15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.782921] env[61998]: DEBUG nova.compute.manager [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 811.152689] env[61998]: DEBUG nova.scheduler.client.report [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 811.166200] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52492aaa-9bc2-21e5-82fd-caac457dbf15, 'name': SearchDatastore_Task, 'duration_secs': 0.008612} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.166806] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b58b624a-614c-40f4-8d23-a85c07776958 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.172517] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){ [ 811.172517] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5281045a-06e0-bf42-263a-875ea4312ab6" [ 811.172517] env[61998]: _type = "Task" [ 811.172517] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.180926] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5281045a-06e0-bf42-263a-875ea4312ab6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.658194] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.884s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.658873] env[61998]: ERROR nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b8f88e55-67b3-4cb8-80bd-68fd47dc79fa, please check neutron logs for more information. 
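[editor's note] The Task entries above (CreateVM_Task, SearchDatastore_Task, 'progress is 0%.', 'completed successfully') trace oslo.vmware's polling loop: wait_for_task re-reads the task state on a fixed interval until it reaches success or error. A rough sketch of that pattern using oslo.service's looping call, with read_task_state as a hypothetical stand-in for the PropertyCollector read the real library performs; the traceback that follows resumes the log with a third binding failure, for port b8f88e55-67b3-4cb8-80bd-68fd47dc79fa.

    # Poll-until-done pattern behind wait_for_task (sketch only).
    from oslo_service import loopingcall

    def wait_for_task(read_task_state, interval=0.5):
        def _poll():
            # read_task_state() -> (state, result), e.g. ('running', None)
            state, result = read_task_state()
            if state == 'success':
                # stops the loop and hands 'result' to the waiter
                raise loopingcall.LoopingCallDone(result)
            if state == 'error':
                # propagates to the waiter via the loop's done event
                raise RuntimeError('task failed: %s' % result)
            # any other state: keep polling on the next interval

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()
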
[ 811.658873] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Traceback (most recent call last): [ 811.658873] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 811.658873] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] self.driver.spawn(context, instance, image_meta, [ 811.658873] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 811.658873] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] self._vmops.spawn(context, instance, image_meta, injected_files, [ 811.658873] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 811.658873] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] vm_ref = self.build_virtual_machine(instance, [ 811.658873] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 811.658873] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] vif_infos = vmwarevif.get_vif_info(self._session, [ 811.658873] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] for vif in network_info: [ 811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] return self._sync_wrapper(fn, *args, **kwargs) [ 811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] self.wait() [ 811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] self[:] = self._gt.wait() [ 811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] return self._exit_event.wait() [ 811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] current.throw(*self._exc) [ 811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
811.659308] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] result = function(*args, **kwargs) [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] return func(*args, **kwargs) [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] raise e [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] nwinfo = self.network_api.allocate_for_instance( [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] created_port_ids = self._update_ports_for_instance( [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] with excutils.save_and_reraise_exception(): [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] self.force_reraise() [ 811.659626] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.659913] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] raise self.value [ 811.659913] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 811.659913] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] updated_port = self._update_port( [ 811.659913] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 811.659913] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] _ensure_no_port_binding_failure(port) [ 811.659913] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 811.659913] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] raise exception.PortBindingFailed(port_id=port['id']) [ 811.659913] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] nova.exception.PortBindingFailed: Binding failed for 
port b8f88e55-67b3-4cb8-80bd-68fd47dc79fa, please check neutron logs for more information. [ 811.659913] env[61998]: ERROR nova.compute.manager [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] [ 811.659913] env[61998]: DEBUG nova.compute.utils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Binding failed for port b8f88e55-67b3-4cb8-80bd-68fd47dc79fa, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 811.660925] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.219s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.662667] env[61998]: INFO nova.compute.claims [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 811.665453] env[61998]: DEBUG nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Build of instance 429bcfa3-8bca-42c3-9049-b7ae09438f47 was re-scheduled: Binding failed for port b8f88e55-67b3-4cb8-80bd-68fd47dc79fa, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}}
[ 811.665669] env[61998]: DEBUG nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}}
[ 811.665893] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Acquiring lock "refresh_cache-429bcfa3-8bca-42c3-9049-b7ae09438f47" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 811.666053] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Acquired lock "refresh_cache-429bcfa3-8bca-42c3-9049-b7ae09438f47" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 811.666211] env[61998]: DEBUG nova.network.neutron [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 811.682954] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5281045a-06e0-bf42-263a-875ea4312ab6, 'name': SearchDatastore_Task, 'duration_secs': 0.008958} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 811.683222] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 811.683463] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 5eb786f1-7789-48a0-a04e-a4039e387f58/5eb786f1-7789-48a0-a04e-a4039e387f58.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 811.683701] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5ffa5f6-8fc4-4a72-b7e3-28b06f3c19d7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 811.691369] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 811.691369] env[61998]: value = "task-1388458"
[ 811.691369] env[61998]: _type = "Task"
[ 811.691369] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 811.698538] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388458, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 811.791438] env[61998]: DEBUG nova.compute.manager [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}}
[ 811.820165] env[61998]: DEBUG nova.virt.hardware [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 811.820437] env[61998]: DEBUG nova.virt.hardware [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 811.820603] env[61998]: DEBUG nova.virt.hardware [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 811.820796] env[61998]: DEBUG nova.virt.hardware [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 811.820978] env[61998]: DEBUG nova.virt.hardware [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 811.821149] env[61998]: DEBUG nova.virt.hardware [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 811.821368] env[61998]: DEBUG nova.virt.hardware [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 811.821535] env[61998]: DEBUG nova.virt.hardware [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 811.821711] env[61998]: DEBUG nova.virt.hardware [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 811.821881] env[61998]: DEBUG nova.virt.hardware [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 811.822081] env[61998]: DEBUG nova.virt.hardware [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 811.823028] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e4ead1-beb7-49a3-8397-1ba530bad2bd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 811.830721] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4e9bdf-9f2c-4c85-bcdc-05a7022f8074 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 811.844221] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Instance VIF info [] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 811.849871] env[61998]: DEBUG oslo.service.loopingcall [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 811.850166] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 811.850375] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3e461dc-9bf0-4461-80fa-dc457250c9e9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 811.869547] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 811.869547] env[61998]: value = "task-1388459"
[ 811.869547] env[61998]: _type = "Task"
[ 811.869547] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 811.877744] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388459, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 812.188335] env[61998]: DEBUG nova.network.neutron [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Instance cache missing network info.
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 812.202953] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388458, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 812.266728] env[61998]: DEBUG nova.network.neutron [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 812.379305] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388459, 'name': CreateVM_Task, 'duration_secs': 0.28086} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 812.379475] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 812.379883] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 812.380057] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 812.380371] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 812.380614] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2118c046-c2c8-40fa-b441-0d9c1aab236b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.384722] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 812.384722] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52d2b18b-ecee-d996-a982-9021757f0bf2"
[ 812.384722] env[61998]: _type = "Task"
[ 812.384722] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 812.391677] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52d2b18b-ecee-d996-a982-9021757f0bf2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 812.705379] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388458, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536976} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 812.705692] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 5eb786f1-7789-48a0-a04e-a4039e387f58/5eb786f1-7789-48a0-a04e-a4039e387f58.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 812.705909] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 812.706178] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed956a7c-c0ea-4ef5-a624-01b31ef160bf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.712593] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 812.712593] env[61998]: value = "task-1388460"
[ 812.712593] env[61998]: _type = "Task"
[ 812.712593] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 812.719644] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388460, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 812.770914] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Releasing lock "refresh_cache-429bcfa3-8bca-42c3-9049-b7ae09438f47" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 812.771166] env[61998]: DEBUG nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}}
[ 812.771350] env[61998]: DEBUG nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 812.771514] env[61998]: DEBUG nova.network.neutron [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 812.787586] env[61998]: DEBUG nova.network.neutron [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 812.895837] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52d2b18b-ecee-d996-a982-9021757f0bf2, 'name': SearchDatastore_Task, 'duration_secs': 0.00883} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 812.896880] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 812.897023] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 812.897258] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 812.897401] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 812.897578] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 812.898308] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d39d294-d5a0-4c63-8add-15a81300f933 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.900663] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1154e3b2-dec5-43c2-a192-1e5af03a6b27 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.906746] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb679d8c-9959-437e-9ad6-c35545d8fc96 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.910369] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 812.911025] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 812.911463] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e80f56d-1388-4f63-8ca4-8e393d308946 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.938932] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72af817f-9faf-4106-ad53-661e8a28024a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.942459] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 812.942459] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52cab4d2-c2a4-98e5-ccc5-07f84c1f595a"
[ 812.942459] env[61998]: _type = "Task"
[ 812.942459] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 812.950047] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f16b2e-4f65-4d63-9f03-c78fd7fc4586 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 812.956455] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52cab4d2-c2a4-98e5-ccc5-07f84c1f595a, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 812.965884] env[61998]: DEBUG nova.compute.provider_tree [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 813.222355] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388460, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064719} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 813.222627] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 813.223590] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c447cad5-e762-4674-9b30-13b97c261296 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 813.243851] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 5eb786f1-7789-48a0-a04e-a4039e387f58/5eb786f1-7789-48a0-a04e-a4039e387f58.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 813.244160] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab83f585-ef11-408a-9d63-82b15cb907b5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 813.262780] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 813.262780] env[61998]: value = "task-1388461"
[ 813.262780] env[61998]: _type = "Task"
[ 813.262780] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 813.270124] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388461, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 813.290819] env[61998]: DEBUG nova.network.neutron [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 813.455109] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52cab4d2-c2a4-98e5-ccc5-07f84c1f595a, 'name': SearchDatastore_Task, 'duration_secs': 0.011393} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 813.455109] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50a1fd39-c320-4e05-94ce-b1fc805ff1ed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 813.458892] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 813.458892] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52aee980-7ad2-a718-a973-cac6663a4ae8"
[ 813.458892] env[61998]: _type = "Task"
[ 813.458892] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 813.466127] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52aee980-7ad2-a718-a973-cac6663a4ae8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 813.469016] env[61998]: DEBUG nova.scheduler.client.report [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 813.772928] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388461, 'name': ReconfigVM_Task, 'duration_secs': 0.297309} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 813.773238] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 5eb786f1-7789-48a0-a04e-a4039e387f58/5eb786f1-7789-48a0-a04e-a4039e387f58.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 813.773821] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0995ef6-46e4-48c1-933b-600b0ae15826 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 813.780303] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 813.780303] env[61998]: value = "task-1388462"
[ 813.780303] env[61998]: _type = "Task"
[ 813.780303] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 813.787588] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388462, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 813.793152] env[61998]: INFO nova.compute.manager [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] [instance: 429bcfa3-8bca-42c3-9049-b7ae09438f47] Took 1.02 seconds to deallocate network for instance.
[ 813.970892] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52aee980-7ad2-a718-a973-cac6663a4ae8, 'name': SearchDatastore_Task, 'duration_secs': 0.030541} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 813.971176] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 813.971430] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] a7225abb-d8ea-49fc-85da-7791d9dde5bc/a7225abb-d8ea-49fc-85da-7791d9dde5bc.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 813.971679] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3bde630-be8d-42f8-8535-5fefcdc31206 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 813.973988] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 813.974471] env[61998]: DEBUG nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}}
[ 813.977570] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.704s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 813.979058] env[61998]: INFO nova.compute.claims [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 813.986363] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 813.986363] env[61998]: value = "task-1388463"
[ 813.986363] env[61998]: _type = "Task"
[ 813.986363] env[61998]: } to complete.
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 813.993898] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388463, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 814.291491] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388462, 'name': Rename_Task, 'duration_secs': 0.132596} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 814.291784] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 814.292098] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f614ec5c-450c-4232-aaee-b16773537a67 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 814.301949] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 814.301949] env[61998]: value = "task-1388464"
[ 814.301949] env[61998]: _type = "Task"
[ 814.301949] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 814.313974] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388464, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 814.479152] env[61998]: DEBUG nova.compute.utils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 814.481035] env[61998]: DEBUG nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 814.481035] env[61998]: DEBUG nova.network.neutron [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 814.496496] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388463, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 814.535838] env[61998]: DEBUG nova.policy [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8b17f109d724201a22264aa6ee02ca1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82b8854f80cf48628167fd6f678d7dd7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}}
[ 814.803546] env[61998]: DEBUG nova.network.neutron [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Successfully created port: 853b8636-f233-4b7c-a320-d00ec813dc5e {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 814.818807] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388464, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 814.835692] env[61998]: INFO nova.scheduler.client.report [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Deleted allocations for instance 429bcfa3-8bca-42c3-9049-b7ae09438f47
[ 814.985049] env[61998]: DEBUG nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}}
[ 814.999586] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388463, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51225} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 814.999833] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] a7225abb-d8ea-49fc-85da-7791d9dde5bc/a7225abb-d8ea-49fc-85da-7791d9dde5bc.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 815.000050] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 815.000298] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4facf847-a4a2-4f70-9a3f-43fd223c8534 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.006736] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 815.006736] env[61998]: value = "task-1388465"
[ 815.006736] env[61998]: _type = "Task"
[ 815.006736] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 815.014244] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388465, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 815.241696] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318b910e-e476-4b4b-9547-e3ca21cf6740 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.248385] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db5446e-406e-46ab-af28-d50c6dfe3f8b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.279043] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5343c9-b657-4753-9601-daf4444af08b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.286775] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80dfcf4f-c3ee-42c8-b856-87280318f2a7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.300551] env[61998]: DEBUG nova.compute.provider_tree [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 815.316864] env[61998]: DEBUG oslo_vmware.api [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388464, 'name': PowerOnVM_Task, 'duration_secs': 0.613139} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 815.317728] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 815.317957] env[61998]: INFO nova.compute.manager [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Took 5.87 seconds to spawn the instance on the hypervisor.
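The CopyVirtualDisk/ExtendVirtualDisk/ReconfigVM/PowerOnVM sequence above is driven by the wait_for_task/_poll_task pair: the driver submits a vSphere task, then repeatedly fetches its state, logging a progress percentage until the task reports success (with a duration_secs) or failure. A minimal, self-contained Python sketch of that polling pattern follows; it is illustrative only, not the oslo.vmware implementation, and TaskFailed, get_task_info and the 'state'/'progress' field names are hypothetical stand-ins.

import time

class TaskFailed(Exception):
    """Raised when the polled task ends in an error state (stand-in)."""

def wait_for_task(get_task_info, poll_interval=0.5):
    # get_task_info() is assumed to return a dict like
    # {'id': 'task-1388458', 'name': 'CopyVirtualDisk_Task',
    #  'state': 'running', 'progress': 89}; in the real driver each
    # call is one PropertyCollector round trip to vCenter.
    started = time.monotonic()
    while True:
        info = get_task_info()
        state = info.get('state')
        if state == 'success':
            # Mirror the log's completion record, including duration_secs.
            info['duration_secs'] = round(time.monotonic() - started, 6)
            print("Task: {'id': %s, 'name': %s, 'duration_secs': %s} "
                  "completed successfully."
                  % (info['id'], info['name'], info['duration_secs']))
            return info
        if state == 'error':
            raise TaskFailed("Task %s failed: %s"
                             % (info['id'], info.get('error')))
        # Still running: emit a progress record and poll again.
        print("Task: {'id': %s, 'name': %s} progress is %s%%."
              % (info['id'], info['name'], info.get('progress', 0)))
        time.sleep(poll_interval)

The design visible in the log matches this shape: progress lines repeat at the polling interval (0% ... 89% ... 100%), and exactly one completion record carries the measured duration_secs.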
[ 815.318461] env[61998]: DEBUG nova.compute.manager [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}}
[ 815.318900] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c151b55-630b-41f3-9f07-48dca1aad4f9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.348553] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ef0e6b5b-0310-4cc7-bdc2-b82d23356d9f tempest-ServersTestJSON-204574708 tempest-ServersTestJSON-204574708-project-member] Lock "429bcfa3-8bca-42c3-9049-b7ae09438f47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.117s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 815.517780] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388465, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06271} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 815.517780] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 815.517780] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd4d0bb-671f-4a82-acd7-44f0f3aa1709 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.537769] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] a7225abb-d8ea-49fc-85da-7791d9dde5bc/a7225abb-d8ea-49fc-85da-7791d9dde5bc.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 815.538854] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7d24105-ed43-4540-b387-05389ba6881d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 815.563125] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 815.563125] env[61998]: value = "task-1388466"
[ 815.563125] env[61998]: _type = "Task"
[ 815.563125] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 815.573173] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388466, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 815.629889] env[61998]: DEBUG nova.compute.manager [req-4e697b31-a33e-45bb-a080-16d76a10e5ca req-4bd5fff0-ed56-4130-aff5-65f5914d66c0 service nova] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Received event network-changed-853b8636-f233-4b7c-a320-d00ec813dc5e {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 815.633876] env[61998]: DEBUG nova.compute.manager [req-4e697b31-a33e-45bb-a080-16d76a10e5ca req-4bd5fff0-ed56-4130-aff5-65f5914d66c0 service nova] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Refreshing instance network info cache due to event network-changed-853b8636-f233-4b7c-a320-d00ec813dc5e. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}}
[ 815.633876] env[61998]: DEBUG oslo_concurrency.lockutils [req-4e697b31-a33e-45bb-a080-16d76a10e5ca req-4bd5fff0-ed56-4130-aff5-65f5914d66c0 service nova] Acquiring lock "refresh_cache-b3232fcd-43b2-4139-afe1-fbe863d0af30" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 815.633876] env[61998]: DEBUG oslo_concurrency.lockutils [req-4e697b31-a33e-45bb-a080-16d76a10e5ca req-4bd5fff0-ed56-4130-aff5-65f5914d66c0 service nova] Acquired lock "refresh_cache-b3232fcd-43b2-4139-afe1-fbe863d0af30" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 815.633876] env[61998]: DEBUG nova.network.neutron [req-4e697b31-a33e-45bb-a080-16d76a10e5ca req-4bd5fff0-ed56-4130-aff5-65f5914d66c0 service nova] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Refreshing network info cache for port 853b8636-f233-4b7c-a320-d00ec813dc5e {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 815.784731] env[61998]: ERROR nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 853b8636-f233-4b7c-a320-d00ec813dc5e, please check neutron logs for more information.
[ 815.784731] env[61998]: ERROR nova.compute.manager Traceback (most recent call last):
[ 815.784731] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 815.784731] env[61998]: ERROR nova.compute.manager     nwinfo = self.network_api.allocate_for_instance(
[ 815.784731] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 815.784731] env[61998]: ERROR nova.compute.manager     created_port_ids = self._update_ports_for_instance(
[ 815.784731] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 815.784731] env[61998]: ERROR nova.compute.manager     with excutils.save_and_reraise_exception():
[ 815.784731] env[61998]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 815.784731] env[61998]: ERROR nova.compute.manager     self.force_reraise()
[ 815.784731] env[61998]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 815.784731] env[61998]: ERROR nova.compute.manager     raise self.value
[ 815.784731] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 815.784731] env[61998]: ERROR nova.compute.manager     updated_port = self._update_port(
[ 815.784731] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 815.784731] env[61998]: ERROR nova.compute.manager     _ensure_no_port_binding_failure(port)
[ 815.785526] env[61998]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 815.785526] env[61998]: ERROR nova.compute.manager     raise exception.PortBindingFailed(port_id=port['id'])
[ 815.785526] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 853b8636-f233-4b7c-a320-d00ec813dc5e, please check neutron logs for more information.
[ 815.785526] env[61998]: ERROR nova.compute.manager
[ 815.785526] env[61998]: Traceback (most recent call last):
[ 815.785526] env[61998]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 815.785526] env[61998]:     listener.cb(fileno)
[ 815.785526] env[61998]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 815.785526] env[61998]:     result = function(*args, **kwargs)
[ 815.785526] env[61998]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 815.785526] env[61998]:     return func(*args, **kwargs)
[ 815.785526] env[61998]:   File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async
[ 815.785526] env[61998]:     raise e
[ 815.785526] env[61998]:   File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async
[ 815.785526] env[61998]:     nwinfo = self.network_api.allocate_for_instance(
[ 815.785526] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 815.785526] env[61998]:     created_port_ids = self._update_ports_for_instance(
[ 815.785526] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 815.785526] env[61998]:     with excutils.save_and_reraise_exception():
[ 815.785526] env[61998]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 815.785526] env[61998]:     self.force_reraise()
[ 815.785526] env[61998]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 815.785526] env[61998]:     raise self.value
[ 815.785526] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 815.785526] env[61998]:     updated_port = self._update_port(
[ 815.785526] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 815.785526] env[61998]:     _ensure_no_port_binding_failure(port)
[ 815.785526] env[61998]:   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 815.785526] env[61998]:     raise exception.PortBindingFailed(port_id=port['id'])
[ 815.786300] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 853b8636-f233-4b7c-a320-d00ec813dc5e, please check neutron logs for more information.
[ 815.786300] env[61998]: Removing descriptor: 15
[ 815.804559] env[61998]: DEBUG nova.scheduler.client.report [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 815.834902] env[61998]: INFO nova.compute.manager [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Took 24.35 seconds to build instance.
[ 815.851765] env[61998]: DEBUG nova.compute.manager [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 815.996768] env[61998]: DEBUG nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 816.022236] env[61998]: DEBUG nova.virt.hardware [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 816.022495] env[61998]: DEBUG nova.virt.hardware [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 816.022647] env[61998]: DEBUG nova.virt.hardware [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.022795] env[61998]: DEBUG nova.virt.hardware [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 816.022915] env[61998]: DEBUG nova.virt.hardware [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.023256] env[61998]: DEBUG nova.virt.hardware [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 816.023498] env[61998]: DEBUG nova.virt.hardware [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 
tempest-DeleteServersTestJSON-1983871495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 816.023667] env[61998]: DEBUG nova.virt.hardware [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 816.023837] env[61998]: DEBUG nova.virt.hardware [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 816.024204] env[61998]: DEBUG nova.virt.hardware [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 816.024443] env[61998]: DEBUG nova.virt.hardware [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 816.025640] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b72935-9a1b-45ec-8566-dbbfca4acdaf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.033636] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88dcc3a9-ff64-4dc5-850e-d6dd03fef091 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.046975] env[61998]: ERROR nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 853b8636-f233-4b7c-a320-d00ec813dc5e, please check neutron logs for more information. 
[ 816.046975] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Traceback (most recent call last): [ 816.046975] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 816.046975] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] yield resources [ 816.046975] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 816.046975] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] self.driver.spawn(context, instance, image_meta, [ 816.046975] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 816.046975] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] self._vmops.spawn(context, instance, image_meta, injected_files, [ 816.046975] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 816.046975] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] vm_ref = self.build_virtual_machine(instance, [ 816.046975] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 816.047339] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] vif_infos = vmwarevif.get_vif_info(self._session, [ 816.047339] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 816.047339] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] for vif in network_info: [ 816.047339] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 816.047339] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] return self._sync_wrapper(fn, *args, **kwargs) [ 816.047339] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 816.047339] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] self.wait() [ 816.047339] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 816.047339] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] self[:] = self._gt.wait() [ 816.047339] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 816.047339] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] return self._exit_event.wait() [ 816.047339] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 816.047339] env[61998]: ERROR 
nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] current.throw(*self._exc) [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] result = function(*args, **kwargs) [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] return func(*args, **kwargs) [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] raise e [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] nwinfo = self.network_api.allocate_for_instance( [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] created_port_ids = self._update_ports_for_instance( [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] with excutils.save_and_reraise_exception(): [ 816.047697] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 816.048066] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] self.force_reraise() [ 816.048066] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 816.048066] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] raise self.value [ 816.048066] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 816.048066] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] updated_port = self._update_port( [ 816.048066] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 816.048066] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] _ensure_no_port_binding_failure(port) [ 816.048066] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
816.048066] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] raise exception.PortBindingFailed(port_id=port['id']) [ 816.048066] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] nova.exception.PortBindingFailed: Binding failed for port 853b8636-f233-4b7c-a320-d00ec813dc5e, please check neutron logs for more information. [ 816.048066] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] [ 816.048066] env[61998]: INFO nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Terminating instance [ 816.049062] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "refresh_cache-b3232fcd-43b2-4139-afe1-fbe863d0af30" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.072020] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388466, 'name': ReconfigVM_Task, 'duration_secs': 0.292451} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.072501] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Reconfigured VM instance instance-0000003c to attach disk [datastore1] a7225abb-d8ea-49fc-85da-7791d9dde5bc/a7225abb-d8ea-49fc-85da-7791d9dde5bc.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 816.073540] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-819b401c-0aca-45f7-91ae-fa3f89098f09 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.078944] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){ [ 816.078944] env[61998]: value = "task-1388467" [ 816.078944] env[61998]: _type = "Task" [ 816.078944] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.088155] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388467, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.149860] env[61998]: DEBUG nova.network.neutron [req-4e697b31-a33e-45bb-a080-16d76a10e5ca req-4bd5fff0-ed56-4130-aff5-65f5914d66c0 service nova] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 816.229895] env[61998]: DEBUG nova.network.neutron [req-4e697b31-a33e-45bb-a080-16d76a10e5ca req-4bd5fff0-ed56-4130-aff5-65f5914d66c0 service nova] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.309659] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.332s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.310206] env[61998]: DEBUG nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 816.313142] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.124s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.314680] env[61998]: INFO nova.compute.claims [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.336983] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e8aec6b1-7bef-41af-b3d9-a2a3ca3eb84d tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "5eb786f1-7789-48a0-a04e-a4039e387f58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.318s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.378455] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.589108] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388467, 'name': Rename_Task, 'duration_secs': 0.130807} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.589587] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 816.589966] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-069c48fc-c3b0-4812-bc54-8e555963181d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.601019] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){ [ 816.601019] env[61998]: value = "task-1388468" [ 816.601019] env[61998]: _type = "Task" [ 816.601019] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.607418] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388468, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.732284] env[61998]: DEBUG oslo_concurrency.lockutils [req-4e697b31-a33e-45bb-a080-16d76a10e5ca req-4bd5fff0-ed56-4130-aff5-65f5914d66c0 service nova] Releasing lock "refresh_cache-b3232fcd-43b2-4139-afe1-fbe863d0af30" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.732731] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "refresh_cache-b3232fcd-43b2-4139-afe1-fbe863d0af30" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.732920] env[61998]: DEBUG nova.network.neutron [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 816.819076] env[61998]: DEBUG nova.compute.utils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 816.820478] env[61998]: DEBUG nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 816.821134] env[61998]: DEBUG nova.network.neutron [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 816.839447] env[61998]: DEBUG nova.compute.manager [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 816.905992] env[61998]: DEBUG nova.policy [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13a84795fdd94f588bddc83ce6ef325e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a6f115277994a32ae3a002d084e90af', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 817.111032] env[61998]: DEBUG oslo_vmware.api [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388468, 'name': PowerOnVM_Task, 'duration_secs': 0.453665} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.111367] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 817.111589] env[61998]: INFO nova.compute.manager [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Took 5.32 seconds to spawn the instance on the hypervisor. [ 817.111762] env[61998]: DEBUG nova.compute.manager [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 817.113837] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341b1c41-8d9b-4bb3-9881-42d015034981 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.254463] env[61998]: DEBUG nova.network.neutron [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.278118] env[61998]: DEBUG nova.network.neutron [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Successfully created port: 5685b696-3b99-4d26-8e53-1bcd8e90accb {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.325446] env[61998]: DEBUG nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 817.368718] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.410614] env[61998]: DEBUG nova.network.neutron [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.636971] env[61998]: INFO nova.compute.manager [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Took 24.33 seconds to build instance. 
[ 817.645718] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ddf6a4-5a0c-491b-9750-ad6e5cf656ba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.655818] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a692b3ca-a3cd-4b7e-aaec-50aebab10b99 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.690236] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d8c878-0f47-4b7b-82e3-70a38243fb23 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.698137] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4bd8e9-1c2a-470c-8c41-006104007e86 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.714564] env[61998]: DEBUG nova.compute.provider_tree [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.725098] env[61998]: DEBUG nova.compute.manager [req-7dce12d3-033e-4f2e-85fa-023348956524 req-02057238-23fa-4ab0-99a6-78b149decf2c service nova] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Received event network-vif-deleted-853b8636-f233-4b7c-a320-d00ec813dc5e {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 817.915191] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "refresh_cache-b3232fcd-43b2-4139-afe1-fbe863d0af30" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.915540] env[61998]: DEBUG nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 817.915540] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 817.915870] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c6f04c4-d34c-480c-9a41-8bf29ee1ed06 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.926320] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f20c4b-3e5c-4e32-9598-56e3a6343643 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.953189] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b3232fcd-43b2-4139-afe1-fbe863d0af30 could not be found. [ 817.953437] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 817.953618] env[61998]: INFO nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Took 0.04 seconds to destroy the instance on the hypervisor. [ 817.953868] env[61998]: DEBUG oslo.service.loopingcall [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 817.954212] env[61998]: DEBUG nova.compute.manager [-] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 817.954308] env[61998]: DEBUG nova.network.neutron [-] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 817.981855] env[61998]: DEBUG nova.network.neutron [-] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.139620] env[61998]: DEBUG oslo_concurrency.lockutils [None req-02a0a67c-607e-4b94-a0d0-de0b861c3d1b tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "a7225abb-d8ea-49fc-85da-7791d9dde5bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.881s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.217752] env[61998]: DEBUG nova.scheduler.client.report [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 818.341820] env[61998]: DEBUG nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 818.363644] env[61998]: DEBUG nova.virt.hardware [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 818.363933] env[61998]: DEBUG nova.virt.hardware [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 818.364050] env[61998]: DEBUG nova.virt.hardware [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.364640] env[61998]: DEBUG nova.virt.hardware [None req-281fab31-c22c-4fc2-9095-666a02f118a7 
tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 818.364640] env[61998]: DEBUG nova.virt.hardware [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.364640] env[61998]: DEBUG nova.virt.hardware [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 818.365204] env[61998]: DEBUG nova.virt.hardware [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 818.365204] env[61998]: DEBUG nova.virt.hardware [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 818.365204] env[61998]: DEBUG nova.virt.hardware [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 818.365352] env[61998]: DEBUG nova.virt.hardware [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 818.365650] env[61998]: DEBUG nova.virt.hardware [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 818.366832] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2535d2-c704-46a2-91d5-4a850923c638 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.374543] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a184b970-e321-4b5e-8285-5c3fccc3c464 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.421560] env[61998]: ERROR nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Instance failed 
network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5685b696-3b99-4d26-8e53-1bcd8e90accb, please check neutron logs for more information. [ 818.421560] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 818.421560] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 818.421560] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 818.421560] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 818.421560] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 818.421560] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 818.421560] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 818.421560] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 818.421560] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 818.421560] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 818.421560] env[61998]: ERROR nova.compute.manager raise self.value [ 818.421560] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 818.421560] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 818.421560] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 818.421560] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 818.422070] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 818.422070] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 818.422070] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5685b696-3b99-4d26-8e53-1bcd8e90accb, please check neutron logs for more information. 
[ 818.422070] env[61998]: ERROR nova.compute.manager [ 818.422070] env[61998]: Traceback (most recent call last): [ 818.422070] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 818.422070] env[61998]: listener.cb(fileno) [ 818.422070] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 818.422070] env[61998]: result = function(*args, **kwargs) [ 818.422070] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 818.422070] env[61998]: return func(*args, **kwargs) [ 818.422070] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 818.422070] env[61998]: raise e [ 818.422070] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 818.422070] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 818.422070] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 818.422070] env[61998]: created_port_ids = self._update_ports_for_instance( [ 818.422070] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 818.422070] env[61998]: with excutils.save_and_reraise_exception(): [ 818.422070] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 818.422070] env[61998]: self.force_reraise() [ 818.422070] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 818.422070] env[61998]: raise self.value [ 818.422070] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 818.422070] env[61998]: updated_port = self._update_port( [ 818.422070] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 818.422070] env[61998]: _ensure_no_port_binding_failure(port) [ 818.422070] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 818.422070] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 818.422893] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 5685b696-3b99-4d26-8e53-1bcd8e90accb, please check neutron logs for more information. [ 818.422893] env[61998]: Removing descriptor: 15 [ 818.422893] env[61998]: ERROR nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5685b696-3b99-4d26-8e53-1bcd8e90accb, please check neutron logs for more information. 
[ 818.422893] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Traceback (most recent call last): [ 818.422893] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 818.422893] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] yield resources [ 818.422893] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 818.422893] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] self.driver.spawn(context, instance, image_meta, [ 818.422893] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 818.422893] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 818.422893] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 818.422893] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] vm_ref = self.build_virtual_machine(instance, [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] vif_infos = vmwarevif.get_vif_info(self._session, [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] for vif in network_info: [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] return self._sync_wrapper(fn, *args, **kwargs) [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] self.wait() [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] self[:] = self._gt.wait() [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] return self._exit_event.wait() [ 818.423287] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 818.423667] env[61998]: ERROR 
nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] result = hub.switch() [ 818.423667] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 818.423667] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] return self.greenlet.switch() [ 818.423667] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 818.423667] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] result = function(*args, **kwargs) [ 818.423667] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 818.423667] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] return func(*args, **kwargs) [ 818.423667] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 818.423667] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] raise e [ 818.423667] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 818.423667] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] nwinfo = self.network_api.allocate_for_instance( [ 818.423667] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 818.423667] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] created_port_ids = self._update_ports_for_instance( [ 818.424092] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 818.424092] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] with excutils.save_and_reraise_exception(): [ 818.424092] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 818.424092] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] self.force_reraise() [ 818.424092] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 818.424092] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] raise self.value [ 818.424092] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 818.424092] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] updated_port = self._update_port( [ 818.424092] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 818.424092] 
env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] _ensure_no_port_binding_failure(port) [ 818.424092] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 818.424092] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] raise exception.PortBindingFailed(port_id=port['id']) [ 818.424442] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] nova.exception.PortBindingFailed: Binding failed for port 5685b696-3b99-4d26-8e53-1bcd8e90accb, please check neutron logs for more information. [ 818.424442] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] [ 818.424442] env[61998]: INFO nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Terminating instance [ 818.425293] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Acquiring lock "refresh_cache-55c6ecdc-0e84-4399-8f1b-307b1c69dcdf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.425484] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Acquired lock "refresh_cache-55c6ecdc-0e84-4399-8f1b-307b1c69dcdf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.425653] env[61998]: DEBUG nova.network.neutron [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 818.486337] env[61998]: DEBUG nova.network.neutron [-] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.645503] env[61998]: DEBUG nova.compute.manager [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 818.657884] env[61998]: INFO nova.compute.manager [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Rebuilding instance [ 818.704019] env[61998]: DEBUG nova.compute.manager [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 818.704019] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d7aead-ffaa-4d6f-9c30-a6ad53173acd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.725019] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.725019] env[61998]: DEBUG nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 818.726266] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.479s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.943672] env[61998]: DEBUG nova.network.neutron [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.989637] env[61998]: INFO nova.compute.manager [-] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Took 1.04 seconds to deallocate network for instance. 
[ 818.991814] env[61998]: DEBUG nova.compute.claims [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 818.991986] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.026618] env[61998]: DEBUG nova.network.neutron [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.164846] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.231838] env[61998]: DEBUG nova.compute.utils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 819.233209] env[61998]: DEBUG nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 819.233388] env[61998]: DEBUG nova.network.neutron [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.290538] env[61998]: DEBUG nova.policy [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9eed83fd4caa4c27ad96de2822cbf39a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f54807d26f81498c8006059fa2100e5a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 819.536751] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Releasing lock "refresh_cache-55c6ecdc-0e84-4399-8f1b-307b1c69dcdf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.536751] env[61998]: DEBUG nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 819.536751] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 819.536751] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-412ac90a-9542-490e-a2c2-926a01835890 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.544212] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca42c977-4093-496c-bba3-1e6b94fd2367 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.556874] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2de978-1c63-4386-ba0e-a3d737b6ba05 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.564251] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdddbd84-aeaa-4f2f-9884-6d6dfa4f5c46 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.573455] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf could not be found. [ 819.573677] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 819.573863] env[61998]: INFO nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 819.574156] env[61998]: DEBUG oslo.service.loopingcall [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 819.574791] env[61998]: DEBUG nova.compute.manager [-] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 819.574919] env[61998]: DEBUG nova.network.neutron [-] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 819.606530] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-addc37df-f9a9-4aab-911b-9653a72d7d32 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.615286] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51dd64c-020e-4aaa-9dbc-3aab390e8fda {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.628245] env[61998]: DEBUG nova.compute.provider_tree [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.633232] env[61998]: DEBUG nova.network.neutron [-] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.633232] env[61998]: DEBUG nova.network.neutron [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Successfully created port: 81dae91d-8920-4195-8367-94d44047f3eb {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.716420] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 819.716514] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc0bafcb-e227-444b-bb15-68f096bbf83c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.724838] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){ [ 819.724838] env[61998]: value = "task-1388469" [ 819.724838] env[61998]: _type = "Task" [ 819.724838] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.733746] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388469, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.740486] env[61998]: DEBUG nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 819.755046] env[61998]: DEBUG nova.compute.manager [req-66cb558d-e8f0-45b9-b5be-126aebe59c1c req-dbd966b1-f6fa-4d1d-a334-c7130bad58d4 service nova] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Received event network-changed-5685b696-3b99-4d26-8e53-1bcd8e90accb {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 819.755046] env[61998]: DEBUG nova.compute.manager [req-66cb558d-e8f0-45b9-b5be-126aebe59c1c req-dbd966b1-f6fa-4d1d-a334-c7130bad58d4 service nova] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Refreshing instance network info cache due to event network-changed-5685b696-3b99-4d26-8e53-1bcd8e90accb. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 819.755046] env[61998]: DEBUG oslo_concurrency.lockutils [req-66cb558d-e8f0-45b9-b5be-126aebe59c1c req-dbd966b1-f6fa-4d1d-a334-c7130bad58d4 service nova] Acquiring lock "refresh_cache-55c6ecdc-0e84-4399-8f1b-307b1c69dcdf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.755046] env[61998]: DEBUG oslo_concurrency.lockutils [req-66cb558d-e8f0-45b9-b5be-126aebe59c1c req-dbd966b1-f6fa-4d1d-a334-c7130bad58d4 service nova] Acquired lock "refresh_cache-55c6ecdc-0e84-4399-8f1b-307b1c69dcdf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.755046] env[61998]: DEBUG nova.network.neutron [req-66cb558d-e8f0-45b9-b5be-126aebe59c1c req-dbd966b1-f6fa-4d1d-a334-c7130bad58d4 service nova] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Refreshing network info cache for port 5685b696-3b99-4d26-8e53-1bcd8e90accb {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 820.138016] env[61998]: DEBUG nova.network.neutron [-] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.138016] env[61998]: DEBUG nova.scheduler.client.report [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 820.236040] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388469, 'name': PowerOffVM_Task, 'duration_secs': 0.195404} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.236546] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 820.237172] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 820.238068] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dece487e-4000-425d-a8b6-0e93c21a130e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.250790] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 820.251594] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6121fe18-36a1-422f-bb32-79beeb10602a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.277508] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 820.277738] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 820.277918] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Deleting the datastore file [datastore1] a7225abb-d8ea-49fc-85da-7791d9dde5bc {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 820.278189] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a8bddf5-55cb-4303-a627-16258a68a670 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.284187] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){ [ 820.284187] env[61998]: value = "task-1388471" [ 820.284187] env[61998]: _type = "Task" [ 820.284187] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.293690] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388471, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.320893] env[61998]: DEBUG nova.network.neutron [req-66cb558d-e8f0-45b9-b5be-126aebe59c1c req-dbd966b1-f6fa-4d1d-a334-c7130bad58d4 service nova] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.492396] env[61998]: DEBUG nova.network.neutron [req-66cb558d-e8f0-45b9-b5be-126aebe59c1c req-dbd966b1-f6fa-4d1d-a334-c7130bad58d4 service nova] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.641797] env[61998]: INFO nova.compute.manager [-] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Took 1.07 seconds to deallocate network for instance. [ 820.642582] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.916s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.643654] env[61998]: ERROR nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bbe3eacb-3a70-427b-acc9-57cc108cd42c, please check neutron logs for more information. 
[ 820.643654] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Traceback (most recent call last): [ 820.643654] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 820.643654] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] self.driver.spawn(context, instance, image_meta, [ 820.643654] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 820.643654] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 820.643654] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 820.643654] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] vm_ref = self.build_virtual_machine(instance, [ 820.643654] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 820.643654] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] vif_infos = vmwarevif.get_vif_info(self._session, [ 820.643654] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] for vif in network_info: [ 820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] return self._sync_wrapper(fn, *args, **kwargs) [ 820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] self.wait() [ 820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] self[:] = self._gt.wait() [ 820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] return self._exit_event.wait() [ 820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] result = hub.switch() [ 820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
820.643999] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] return self.greenlet.switch() [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] result = function(*args, **kwargs) [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] return func(*args, **kwargs) [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] raise e [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] nwinfo = self.network_api.allocate_for_instance( [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] created_port_ids = self._update_ports_for_instance( [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] with excutils.save_and_reraise_exception(): [ 820.644386] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 820.644746] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] self.force_reraise() [ 820.644746] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 820.644746] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] raise self.value [ 820.644746] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 820.644746] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] updated_port = self._update_port( [ 820.644746] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 820.644746] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] _ensure_no_port_binding_failure(port) [ 820.644746] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 820.644746] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] raise exception.PortBindingFailed(port_id=port['id']) [ 820.644746] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] nova.exception.PortBindingFailed: Binding failed for port bbe3eacb-3a70-427b-acc9-57cc108cd42c, please check neutron logs for more information. [ 820.644746] env[61998]: ERROR nova.compute.manager [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] [ 820.645046] env[61998]: DEBUG nova.compute.utils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Binding failed for port bbe3eacb-3a70-427b-acc9-57cc108cd42c, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 820.648859] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.482s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.654848] env[61998]: DEBUG nova.compute.claims [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 820.655033] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.655448] env[61998]: DEBUG nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Build of instance 2963f997-eb4c-4bfd-be28-6c1b383598c6 was re-scheduled: Binding failed for port bbe3eacb-3a70-427b-acc9-57cc108cd42c, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 820.656062] env[61998]: DEBUG nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 820.656274] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "refresh_cache-2963f997-eb4c-4bfd-be28-6c1b383598c6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.656418] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquired lock "refresh_cache-2963f997-eb4c-4bfd-be28-6c1b383598c6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.656575] env[61998]: DEBUG nova.network.neutron [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.685050] env[61998]: ERROR nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 81dae91d-8920-4195-8367-94d44047f3eb, please check neutron logs for more information. 
[ 820.685050] env[61998]: ERROR nova.compute.manager Traceback (most recent call last): [ 820.685050] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 820.685050] env[61998]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 820.685050] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 820.685050] env[61998]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 820.685050] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 820.685050] env[61998]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 820.685050] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 820.685050] env[61998]: ERROR nova.compute.manager self.force_reraise() [ 820.685050] env[61998]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 820.685050] env[61998]: ERROR nova.compute.manager raise self.value [ 820.685050] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 820.685050] env[61998]: ERROR nova.compute.manager updated_port = self._update_port( [ 820.685050] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 820.685050] env[61998]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 820.685791] env[61998]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 820.685791] env[61998]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 820.685791] env[61998]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 81dae91d-8920-4195-8367-94d44047f3eb, please check neutron logs for more information. 
[ 820.685791] env[61998]: ERROR nova.compute.manager [ 820.685791] env[61998]: Traceback (most recent call last): [ 820.685791] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 820.685791] env[61998]: listener.cb(fileno) [ 820.685791] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 820.685791] env[61998]: result = function(*args, **kwargs) [ 820.685791] env[61998]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 820.685791] env[61998]: return func(*args, **kwargs) [ 820.685791] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 820.685791] env[61998]: raise e [ 820.685791] env[61998]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 820.685791] env[61998]: nwinfo = self.network_api.allocate_for_instance( [ 820.685791] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 820.685791] env[61998]: created_port_ids = self._update_ports_for_instance( [ 820.685791] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 820.685791] env[61998]: with excutils.save_and_reraise_exception(): [ 820.685791] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 820.685791] env[61998]: self.force_reraise() [ 820.685791] env[61998]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 820.685791] env[61998]: raise self.value [ 820.685791] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 820.685791] env[61998]: updated_port = self._update_port( [ 820.685791] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 820.685791] env[61998]: _ensure_no_port_binding_failure(port) [ 820.685791] env[61998]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 820.685791] env[61998]: raise exception.PortBindingFailed(port_id=port['id']) [ 820.686607] env[61998]: nova.exception.PortBindingFailed: Binding failed for port 81dae91d-8920-4195-8367-94d44047f3eb, please check neutron logs for more information. [ 820.686607] env[61998]: Removing descriptor: 15 [ 820.753728] env[61998]: DEBUG nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 820.783279] env[61998]: DEBUG nova.virt.hardware [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 820.783745] env[61998]: DEBUG nova.virt.hardware [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 820.784383] env[61998]: DEBUG nova.virt.hardware [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.784383] env[61998]: DEBUG nova.virt.hardware [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 820.784807] env[61998]: DEBUG nova.virt.hardware [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.785506] env[61998]: DEBUG nova.virt.hardware [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 820.785506] env[61998]: DEBUG nova.virt.hardware [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 820.785506] env[61998]: DEBUG nova.virt.hardware [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 820.785670] env[61998]: DEBUG nova.virt.hardware [None 
req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 820.785771] env[61998]: DEBUG nova.virt.hardware [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 820.786017] env[61998]: DEBUG nova.virt.hardware [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 820.787726] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1377835-d10d-4386-afc6-d414ba61396c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.797802] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388471, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087696} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.799899] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 820.800106] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 820.800285] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 820.804382] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a631bde9-7fd6-45bd-94cc-529d3172436e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.818725] env[61998]: ERROR nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 81dae91d-8920-4195-8367-94d44047f3eb, please check neutron logs for more information. 
[ 820.818725] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Traceback (most recent call last): [ 820.818725] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 820.818725] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] yield resources [ 820.818725] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 820.818725] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] self.driver.spawn(context, instance, image_meta, [ 820.818725] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 820.818725] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 820.818725] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 820.818725] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] vm_ref = self.build_virtual_machine(instance, [ 820.818725] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 820.819357] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] vif_infos = vmwarevif.get_vif_info(self._session, [ 820.819357] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 820.819357] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] for vif in network_info: [ 820.819357] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 820.819357] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] return self._sync_wrapper(fn, *args, **kwargs) [ 820.819357] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 820.819357] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] self.wait() [ 820.819357] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 820.819357] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] self[:] = self._gt.wait() [ 820.819357] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 820.819357] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] return self._exit_event.wait() [ 820.819357] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 820.819357] env[61998]: ERROR 
nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] current.throw(*self._exc) [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] result = function(*args, **kwargs) [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] return func(*args, **kwargs) [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] raise e [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] nwinfo = self.network_api.allocate_for_instance( [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] created_port_ids = self._update_ports_for_instance( [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] with excutils.save_and_reraise_exception(): [ 820.819683] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 820.820210] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] self.force_reraise() [ 820.820210] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 820.820210] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] raise self.value [ 820.820210] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 820.820210] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] updated_port = self._update_port( [ 820.820210] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 820.820210] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] _ensure_no_port_binding_failure(port) [ 820.820210] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
820.820210] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] raise exception.PortBindingFailed(port_id=port['id']) [ 820.820210] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] nova.exception.PortBindingFailed: Binding failed for port 81dae91d-8920-4195-8367-94d44047f3eb, please check neutron logs for more information. [ 820.820210] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] [ 820.820210] env[61998]: INFO nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Terminating instance [ 820.820525] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Acquiring lock "refresh_cache-169437f2-fb18-4d5c-8d00-b82e9e5752d5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.820525] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Acquired lock "refresh_cache-169437f2-fb18-4d5c-8d00-b82e9e5752d5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.820627] env[61998]: DEBUG nova.network.neutron [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.995077] env[61998]: DEBUG oslo_concurrency.lockutils [req-66cb558d-e8f0-45b9-b5be-126aebe59c1c req-dbd966b1-f6fa-4d1d-a334-c7130bad58d4 service nova] Releasing lock "refresh_cache-55c6ecdc-0e84-4399-8f1b-307b1c69dcdf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.995414] env[61998]: DEBUG nova.compute.manager [req-66cb558d-e8f0-45b9-b5be-126aebe59c1c req-dbd966b1-f6fa-4d1d-a334-c7130bad58d4 service nova] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Received event network-vif-deleted-5685b696-3b99-4d26-8e53-1bcd8e90accb {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 821.217632] env[61998]: DEBUG nova.network.neutron [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 821.339797] env[61998]: DEBUG nova.network.neutron [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.351224] env[61998]: DEBUG nova.network.neutron [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 821.432629] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3408a999-c8bc-463c-aae7-814b35a20eba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.441930] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b921b27b-9793-4961-aa82-dbcfb1dfcc84 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.479511] env[61998]: DEBUG nova.network.neutron [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.481153] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf283c75-0cda-431e-a5ee-74f1226ed6f5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.490030] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c19a9e-fbd0-4874-9d1f-c153254607cb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.503775] env[61998]: DEBUG nova.compute.provider_tree [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.793805] env[61998]: DEBUG nova.compute.manager [req-6dd0927a-2fc8-4cbd-b74e-bd2ef68065e3 req-079476b9-3731-4499-818f-46b1dabdd3f7 service nova] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Received event network-changed-81dae91d-8920-4195-8367-94d44047f3eb {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 821.794059] env[61998]: DEBUG nova.compute.manager [req-6dd0927a-2fc8-4cbd-b74e-bd2ef68065e3 req-079476b9-3731-4499-818f-46b1dabdd3f7 service nova] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Refreshing instance network info cache due to event network-changed-81dae91d-8920-4195-8367-94d44047f3eb. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 821.794299] env[61998]: DEBUG oslo_concurrency.lockutils [req-6dd0927a-2fc8-4cbd-b74e-bd2ef68065e3 req-079476b9-3731-4499-818f-46b1dabdd3f7 service nova] Acquiring lock "refresh_cache-169437f2-fb18-4d5c-8d00-b82e9e5752d5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.836321] env[61998]: DEBUG nova.virt.hardware [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 821.836572] env[61998]: DEBUG nova.virt.hardware [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 821.836727] env[61998]: DEBUG nova.virt.hardware [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.836905] env[61998]: DEBUG nova.virt.hardware [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 821.837061] env[61998]: DEBUG nova.virt.hardware [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.837389] env[61998]: DEBUG nova.virt.hardware [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 821.837632] env[61998]: DEBUG nova.virt.hardware [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 821.837798] env[61998]: DEBUG nova.virt.hardware [None req-57991593-1c12-433e-ae19-888531e6e40c 
tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 821.837964] env[61998]: DEBUG nova.virt.hardware [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 821.838239] env[61998]: DEBUG nova.virt.hardware [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 821.838355] env[61998]: DEBUG nova.virt.hardware [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 821.839184] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50dbb143-d8f4-4e36-bc43-59e9fffb3b3a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.846370] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Releasing lock "refresh_cache-2963f997-eb4c-4bfd-be28-6c1b383598c6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.846579] env[61998]: DEBUG nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 821.846755] env[61998]: DEBUG nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 821.846918] env[61998]: DEBUG nova.network.neutron [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 821.849672] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0684235f-5a1f-49d9-8382-221687e3c68a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.863558] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Instance VIF info [] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.869189] env[61998]: DEBUG oslo.service.loopingcall [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 821.869948] env[61998]: DEBUG nova.network.neutron [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 821.871097] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.871283] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5dff2d2c-b455-45de-9606-e2d8455818c1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.888796] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.888796] env[61998]: value = "task-1388472" [ 821.888796] env[61998]: _type = "Task" [ 821.888796] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.896192] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388472, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.984954] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Releasing lock "refresh_cache-169437f2-fb18-4d5c-8d00-b82e9e5752d5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.985490] env[61998]: DEBUG nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 821.985660] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 821.986314] env[61998]: DEBUG oslo_concurrency.lockutils [req-6dd0927a-2fc8-4cbd-b74e-bd2ef68065e3 req-079476b9-3731-4499-818f-46b1dabdd3f7 service nova] Acquired lock "refresh_cache-169437f2-fb18-4d5c-8d00-b82e9e5752d5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.986508] env[61998]: DEBUG nova.network.neutron [req-6dd0927a-2fc8-4cbd-b74e-bd2ef68065e3 req-079476b9-3731-4499-818f-46b1dabdd3f7 service nova] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Refreshing network info cache for port 81dae91d-8920-4195-8367-94d44047f3eb {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.987674] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a5e9e5d-fc3f-42ca-a283-7da403f15f5d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.996752] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c121437-8ef0-42a6-8805-c9f94e02c89f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.008641] env[61998]: DEBUG nova.scheduler.client.report [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 822.026366] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 169437f2-fb18-4d5c-8d00-b82e9e5752d5 could not be 
found. [ 822.026595] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 822.026798] env[61998]: INFO nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 822.027021] env[61998]: DEBUG oslo.service.loopingcall [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 822.027835] env[61998]: DEBUG nova.compute.manager [-] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 822.027940] env[61998]: DEBUG nova.network.neutron [-] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 822.052809] env[61998]: DEBUG nova.network.neutron [-] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.078077] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Acquiring lock "4ca7de74-3bcb-4da0-a2e1-573584467cc9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.078277] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Lock "4ca7de74-3bcb-4da0-a2e1-573584467cc9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.384023] env[61998]: DEBUG nova.network.neutron [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.399208] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388472, 'name': CreateVM_Task, 'duration_secs': 0.25175} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.399385] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 822.399754] env[61998]: DEBUG oslo_concurrency.lockutils [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.399906] env[61998]: DEBUG oslo_concurrency.lockutils [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.400243] env[61998]: DEBUG oslo_concurrency.lockutils [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 822.400472] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25c9a67b-ee81-4259-8396-57a9f9b58ce5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.404555] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){ [ 822.404555] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52bfae5d-747c-ef72-199a-91bd2b6cba8c" [ 822.404555] env[61998]: _type = "Task" [ 822.404555] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.411486] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52bfae5d-747c-ef72-199a-91bd2b6cba8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.508510] env[61998]: DEBUG nova.network.neutron [req-6dd0927a-2fc8-4cbd-b74e-bd2ef68065e3 req-079476b9-3731-4499-818f-46b1dabdd3f7 service nova] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.513716] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.865s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.513894] env[61998]: ERROR nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3e49bdec-f837-41fb-a086-6e21c3fcbe48, please check neutron logs for more information. [ 822.513894] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] Traceback (most recent call last): [ 822.513894] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 822.513894] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] self.driver.spawn(context, instance, image_meta, [ 822.513894] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 822.513894] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] self._vmops.spawn(context, instance, image_meta, injected_files, [ 822.513894] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 822.513894] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] vm_ref = self.build_virtual_machine(instance, [ 822.513894] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 822.513894] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] vif_infos = vmwarevif.get_vif_info(self._session, [ 822.513894] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 822.514216] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] for vif in network_info: [ 822.514216] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 822.514216] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] return self._sync_wrapper(fn, *args, **kwargs) [ 822.514216] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 822.514216] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] self.wait() [ 822.514216] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 822.514216] env[61998]: ERROR nova.compute.manager 
[instance: 08e60642-0784-4898-9de5-444a24fba508] self[:] = self._gt.wait() [ 822.514216] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 822.514216] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] return self._exit_event.wait() [ 822.514216] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 822.514216] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] result = hub.switch() [ 822.514216] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 822.514216] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] return self.greenlet.switch() [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] result = function(*args, **kwargs) [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] return func(*args, **kwargs) [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] raise e [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] nwinfo = self.network_api.allocate_for_instance( [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] created_port_ids = self._update_ports_for_instance( [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] with excutils.save_and_reraise_exception(): [ 822.514530] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 822.514832] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] self.force_reraise() [ 822.514832] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise 
[ 822.514832] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] raise self.value [ 822.514832] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 822.514832] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] updated_port = self._update_port( [ 822.514832] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 822.514832] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] _ensure_no_port_binding_failure(port) [ 822.514832] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 822.514832] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] raise exception.PortBindingFailed(port_id=port['id']) [ 822.514832] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] nova.exception.PortBindingFailed: Binding failed for port 3e49bdec-f837-41fb-a086-6e21c3fcbe48, please check neutron logs for more information. [ 822.514832] env[61998]: ERROR nova.compute.manager [instance: 08e60642-0784-4898-9de5-444a24fba508] [ 822.515113] env[61998]: DEBUG nova.compute.utils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Binding failed for port 3e49bdec-f837-41fb-a086-6e21c3fcbe48, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 822.515830] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.697s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.517412] env[61998]: INFO nova.compute.claims [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 822.520069] env[61998]: DEBUG nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Build of instance 08e60642-0784-4898-9de5-444a24fba508 was re-scheduled: Binding failed for port 3e49bdec-f837-41fb-a086-6e21c3fcbe48, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 822.520422] env[61998]: DEBUG nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 822.520646] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Acquiring lock "refresh_cache-08e60642-0784-4898-9de5-444a24fba508" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.520792] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Acquired lock "refresh_cache-08e60642-0784-4898-9de5-444a24fba508" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.520948] env[61998]: DEBUG nova.network.neutron [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 822.555756] env[61998]: DEBUG nova.network.neutron [-] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.613527] env[61998]: DEBUG nova.network.neutron [req-6dd0927a-2fc8-4cbd-b74e-bd2ef68065e3 req-079476b9-3731-4499-818f-46b1dabdd3f7 service nova] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.886975] env[61998]: INFO nova.compute.manager [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 2963f997-eb4c-4bfd-be28-6c1b383598c6] Took 1.04 seconds to deallocate network for instance. [ 822.915289] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52bfae5d-747c-ef72-199a-91bd2b6cba8c, 'name': SearchDatastore_Task, 'duration_secs': 0.009449} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.915594] env[61998]: DEBUG oslo_concurrency.lockutils [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.915839] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 822.917671] env[61998]: DEBUG oslo_concurrency.lockutils [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.917671] env[61998]: DEBUG oslo_concurrency.lockutils [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.917671] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 822.917671] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3ca2c15-37b3-4b12-9243-ee2fc0fd0ce1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.925296] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 822.926252] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 822.926252] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f23f2546-9dfb-4753-8358-c47e8241bc36 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.932190] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){ [ 822.932190] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52466210-617c-40c3-9d7b-419b27487cd1" [ 822.932190] env[61998]: _type = "Task" [ 822.932190] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.940187] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52466210-617c-40c3-9d7b-419b27487cd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.043958] env[61998]: DEBUG nova.network.neutron [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.058605] env[61998]: INFO nova.compute.manager [-] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Took 1.03 seconds to deallocate network for instance. 
[ 823.060868] env[61998]: DEBUG nova.compute.claims [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Aborting claim: {{(pid=61998) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 823.061060] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.118929] env[61998]: DEBUG oslo_concurrency.lockutils [req-6dd0927a-2fc8-4cbd-b74e-bd2ef68065e3 req-079476b9-3731-4499-818f-46b1dabdd3f7 service nova] Releasing lock "refresh_cache-169437f2-fb18-4d5c-8d00-b82e9e5752d5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.119223] env[61998]: DEBUG nova.compute.manager [req-6dd0927a-2fc8-4cbd-b74e-bd2ef68065e3 req-079476b9-3731-4499-818f-46b1dabdd3f7 service nova] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Received event network-vif-deleted-81dae91d-8920-4195-8367-94d44047f3eb {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 823.129894] env[61998]: DEBUG nova.network.neutron [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.442171] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52466210-617c-40c3-9d7b-419b27487cd1, 'name': SearchDatastore_Task, 'duration_secs': 0.008148} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.442945] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-380bdb0e-90d0-49b7-bad0-eba97ad7a8e5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.447981] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){ [ 823.447981] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52833b7f-78d0-c010-40aa-f8870c79ccfe" [ 823.447981] env[61998]: _type = "Task" [ 823.447981] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.455415] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52833b7f-78d0-c010-40aa-f8870c79ccfe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.633089] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Releasing lock "refresh_cache-08e60642-0784-4898-9de5-444a24fba508" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.633367] env[61998]: DEBUG nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 823.633553] env[61998]: DEBUG nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 823.633719] env[61998]: DEBUG nova.network.neutron [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 823.648162] env[61998]: DEBUG nova.network.neutron [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.745877] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693295e3-9608-4188-b747-8b389189793a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.753424] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4445e517-e4b3-43ff-b7bf-465c8e99e15b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.782439] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507b9857-c0a5-4afe-9b9c-4edfc4d31d2a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.789725] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6948480-a85f-4079-beb2-63381bf03cc5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.802326] env[61998]: DEBUG nova.compute.provider_tree [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.915943] env[61998]: INFO nova.scheduler.client.report [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Deleted allocations for instance 2963f997-eb4c-4bfd-be28-6c1b383598c6 [ 823.960708] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52833b7f-78d0-c010-40aa-f8870c79ccfe, 'name': SearchDatastore_Task, 'duration_secs': 0.008933} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.960963] env[61998]: DEBUG oslo_concurrency.lockutils [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.964622] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] a7225abb-d8ea-49fc-85da-7791d9dde5bc/a7225abb-d8ea-49fc-85da-7791d9dde5bc.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 823.964883] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aea3f219-7742-4066-a61b-f9d816882c4a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.973223] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){ [ 823.973223] env[61998]: value = "task-1388473" [ 823.973223] env[61998]: _type = "Task" [ 823.973223] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.981027] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388473, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.151763] env[61998]: DEBUG nova.network.neutron [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.305254] env[61998]: DEBUG nova.scheduler.client.report [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 824.425522] env[61998]: DEBUG oslo_concurrency.lockutils [None req-97c85db1-bc04-4a6d-80f1-01139431fe2b tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "2963f997-eb4c-4bfd-be28-6c1b383598c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.630s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.483689] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388473, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464479} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 824.484018] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] a7225abb-d8ea-49fc-85da-7791d9dde5bc/a7225abb-d8ea-49fc-85da-7791d9dde5bc.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 824.484238] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 824.484513] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe0ea4e3-bc3b-4617-b92c-e1211c4623fb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.490299] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 824.490299] env[61998]: value = "task-1388474"
[ 824.490299] env[61998]: _type = "Task"
[ 824.490299] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 824.497809] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388474, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 824.654760] env[61998]: INFO nova.compute.manager [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] [instance: 08e60642-0784-4898-9de5-444a24fba508] Took 1.02 seconds to deallocate network for instance.
[ 824.815186] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 824.815756] env[61998]: DEBUG nova.compute.manager [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}}
[ 824.818946] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.790s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 824.820385] env[61998]: INFO nova.compute.claims [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 824.928703] env[61998]: DEBUG nova.compute.manager [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}}
[ 824.999955] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388474, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058782} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 825.000243] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 825.001019] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160e4570-f005-4636-9b54-e22a5f4c522f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.020443] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] a7225abb-d8ea-49fc-85da-7791d9dde5bc/a7225abb-d8ea-49fc-85da-7791d9dde5bc.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 825.020695] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85a5f82b-f899-4794-bcef-910048c01c80 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.040161] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 825.040161] env[61998]: value = "task-1388475"
[ 825.040161] env[61998]: _type = "Task"
[ 825.040161] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 825.048296] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388475, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 825.324499] env[61998]: DEBUG nova.compute.utils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 825.329019] env[61998]: DEBUG nova.compute.manager [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 825.329019] env[61998]: DEBUG nova.network.neutron [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 825.403115] env[61998]: DEBUG nova.policy [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb3aa49d8e7641e18914d08411d2e4da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58de167acb584319ab41854e0641a962', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}}
[ 825.456313] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 825.551235] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388475, 'name': ReconfigVM_Task, 'duration_secs': 0.28501} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
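The ExtendVirtualDisk_Task and ReconfigVM_Task entries above show oslo.vmware's wait-for-task pattern: the vCenter call returns a task handle immediately, and the client polls it until it reaches a terminal state, logging "progress is N%" along the way. A minimal sketch of that loop, assuming a hypothetical fetch_task_info() callable in place of the real oslo_vmware session object:

    import time

    def wait_for_task(fetch_task_info, interval=0.5, timeout=60.0):
        # Poll a vCenter-style task until it succeeds, fails, or times out;
        # the real loop lives in oslo_vmware.api and emits the _poll_task
        # progress entries seen in the log above.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()  # e.g. {'state': 'running', 'progress': 5}
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('message', 'task failed'))
            time.sleep(interval)
        raise TimeoutError('task did not complete within %.1fs' % timeout)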
[ 825.551620] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Reconfigured VM instance instance-0000003c to attach disk [datastore2] a7225abb-d8ea-49fc-85da-7791d9dde5bc/a7225abb-d8ea-49fc-85da-7791d9dde5bc.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 825.552458] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d11ed54a-9363-4c67-af1d-bce299e47a82 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 825.558969] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 825.558969] env[61998]: value = "task-1388476"
[ 825.558969] env[61998]: _type = "Task"
[ 825.558969] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 825.566679] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388476, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 825.693565] env[61998]: INFO nova.scheduler.client.report [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Deleted allocations for instance 08e60642-0784-4898-9de5-444a24fba508
[ 825.748351] env[61998]: DEBUG nova.network.neutron [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Successfully created port: 4f851cfd-9ab8-4add-99ef-c7c946ce98b7 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 825.832391] env[61998]: DEBUG nova.compute.manager [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}}
[ 826.070988] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388476, 'name': Rename_Task, 'duration_secs': 0.155816} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 826.071433] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 826.071870] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5cd925a0-63ef-4372-be4d-c5646e8c9680 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.082387] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 826.082387] env[61998]: value = "task-1388477"
[ 826.082387] env[61998]: _type = "Task"
[ 826.082387] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 826.088635] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388477, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 826.106019] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f59b6b-74e6-4002-8038-517bd9a5f7c4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.113549] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6943ca-5772-4d4a-8ee4-b05bf5413231 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.148031] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f66d1af-c525-44f3-8ef8-6ba425e10d85 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.155088] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bff85b-462d-49ac-9aa0-4b96a4eef668 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.168446] env[61998]: DEBUG nova.compute.provider_tree [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 826.209324] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a98a517b-69fd-4d34-949e-702010fe5202 tempest-ServerGroupTestJSON-1937241863 tempest-ServerGroupTestJSON-1937241863-project-member] Lock "08e60642-0784-4898-9de5-444a24fba508" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.450s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
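The lockutils entries above ("acquired ... waited 14.790s", "released ... held 122.450s") come from oslo.concurrency's lock wrapper, which times how long a caller waited for a named lock and how long it held it. A rough plain-threading equivalent of that timing, illustrative only (the real logging is done by the inner wrapper in oslo_concurrency/lockutils.py):

    import threading
    import time

    _locks = {'compute_resources': threading.Lock()}

    def with_timed_lock(name, fn):
        # Mirror the waited/held bookkeeping that lockutils logs.
        start = time.monotonic()
        with _locks[name]:
            waited = time.monotonic() - start
            print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
            held_start = time.monotonic()
            try:
                return fn()
            finally:
                held = time.monotonic() - held_start
                print('Lock "%s" released :: held %.3fs' % (name, held))

The long waits in the log are contention on the single "compute_resources" lock, which every concurrent instance claim on this host serializes through.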
[ 826.588397] env[61998]: DEBUG oslo_vmware.api [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388477, 'name': PowerOnVM_Task, 'duration_secs': 0.416366} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 826.588710] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 826.588885] env[61998]: DEBUG nova.compute.manager [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}}
[ 826.590816] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4012cf39-3c88-4c8b-9f02-6d93c39e957a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.596266] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 826.596480] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 826.671746] env[61998]: DEBUG nova.scheduler.client.report [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 826.712532] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}}
[ 826.843654] env[61998]: DEBUG nova.compute.manager [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}}
[ 826.870300] env[61998]: DEBUG nova.virt.hardware [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 826.870580] env[61998]: DEBUG nova.virt.hardware [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 826.870754] env[61998]: DEBUG nova.virt.hardware [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 826.870939] env[61998]: DEBUG nova.virt.hardware [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 826.871099] env[61998]: DEBUG nova.virt.hardware [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 826.871332] env[61998]: DEBUG nova.virt.hardware [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 826.871561] env[61998]: DEBUG nova.virt.hardware [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 826.872678] env[61998]: DEBUG nova.virt.hardware [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 826.872678] env[61998]: DEBUG nova.virt.hardware [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 826.872678] env[61998]: DEBUG nova.virt.hardware [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 826.872678] env[61998]: DEBUG nova.virt.hardware [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 826.873073] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b39dbe-1e17-4b20-8bfd-d2e8bffbdba5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 826.881926] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb3da96-a0b6-4644-a4bb-0ec99764c7cc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 827.116291] env[61998]: DEBUG oslo_concurrency.lockutils [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 827.176661] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.357s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 827.178019] env[61998]: DEBUG nova.compute.manager [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}}
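The nova.virt.hardware entries above walk the CPU topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits or preferences set (0:0:0), the maxima default to 65536 each and the enumeration yields the single topology 1:1:1. A simplified sketch of that enumeration, matching the logged outcome but not the full logic of _get_possible_cpu_topologies in nova/virt/hardware.py:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate (sockets, cores, threads) triples that exactly cover the
        # vCPU count within the maxima, as in "Got 1 possible topologies".
        return [(s, c, t)
                for s in range(1, min(vcpus, max_sockets) + 1)
                for c in range(1, min(vcpus, max_cores) + 1)
                for t in range(1, min(vcpus, max_threads) + 1)
                if s * c * t == vcpus]

    print(possible_topologies(1))  # [(1, 1, 1)]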
[ 827.179329] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.801s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 827.180967] env[61998]: INFO nova.compute.claims [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 827.236280] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 827.569590] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "a7225abb-d8ea-49fc-85da-7791d9dde5bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 827.569871] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "a7225abb-d8ea-49fc-85da-7791d9dde5bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 827.571029] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "a7225abb-d8ea-49fc-85da-7791d9dde5bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 827.571029] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "a7225abb-d8ea-49fc-85da-7791d9dde5bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 827.571289] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "a7225abb-d8ea-49fc-85da-7791d9dde5bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 827.573889] env[61998]: INFO nova.compute.manager [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Terminating instance
[ 827.576236] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "refresh_cache-a7225abb-d8ea-49fc-85da-7791d9dde5bc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 827.576397] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquired lock "refresh_cache-a7225abb-d8ea-49fc-85da-7791d9dde5bc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 827.576715] env[61998]: DEBUG nova.network.neutron [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 827.639823] env[61998]: DEBUG nova.compute.manager [req-f7479b82-2e29-4ee3-8b0f-71511cf07c82 req-15b8f4d3-cd72-4507-b9d0-e9f5b7fa074e service nova] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Received event network-vif-plugged-4f851cfd-9ab8-4add-99ef-c7c946ce98b7 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 827.640095] env[61998]: DEBUG oslo_concurrency.lockutils [req-f7479b82-2e29-4ee3-8b0f-71511cf07c82 req-15b8f4d3-cd72-4507-b9d0-e9f5b7fa074e service nova] Acquiring lock "c55717f0-8ef2-4e55-b1cf-60f6faea9e5e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 827.640268] env[61998]: DEBUG oslo_concurrency.lockutils [req-f7479b82-2e29-4ee3-8b0f-71511cf07c82 req-15b8f4d3-cd72-4507-b9d0-e9f5b7fa074e service nova] Lock "c55717f0-8ef2-4e55-b1cf-60f6faea9e5e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 827.640435] env[61998]: DEBUG oslo_concurrency.lockutils [req-f7479b82-2e29-4ee3-8b0f-71511cf07c82 req-15b8f4d3-cd72-4507-b9d0-e9f5b7fa074e service nova] Lock "c55717f0-8ef2-4e55-b1cf-60f6faea9e5e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 827.640599] env[61998]: DEBUG nova.compute.manager [req-f7479b82-2e29-4ee3-8b0f-71511cf07c82 req-15b8f4d3-cd72-4507-b9d0-e9f5b7fa074e service nova] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] No waiting events found dispatching network-vif-plugged-4f851cfd-9ab8-4add-99ef-c7c946ce98b7 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 827.640779] env[61998]: WARNING nova.compute.manager [req-f7479b82-2e29-4ee3-8b0f-71511cf07c82 req-15b8f4d3-cd72-4507-b9d0-e9f5b7fa074e service nova] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Received unexpected event network-vif-plugged-4f851cfd-9ab8-4add-99ef-c7c946ce98b7 for instance with vm_state building and task_state spawning.
[ 827.686224] env[61998]: DEBUG nova.compute.utils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 827.694020] env[61998]: DEBUG nova.compute.manager [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 827.694020] env[61998]: DEBUG nova.network.neutron [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 827.735339] env[61998]: DEBUG nova.policy [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f155bbfca47547c2bf745811003ffcec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f49104f21d7147328bcc8edee8d3cdb2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}}
[ 827.821088] env[61998]: DEBUG nova.network.neutron [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Successfully updated port: 4f851cfd-9ab8-4add-99ef-c7c946ce98b7 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 828.066344] env[61998]: DEBUG nova.network.neutron [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Successfully created port: 50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 828.109170] env[61998]: DEBUG nova.network.neutron [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 828.199117] env[61998]: DEBUG nova.compute.manager [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}}
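The network-vif-plugged warning above is the external-event path in action: Neutron reports the port as plugged before the compute manager has registered a waiter for that event, so the pop finds nothing and the event is logged as unexpected (harmless while the instance is still spawning). A toy version of that pop-or-warn logic, with hypothetical instance and event names; the real bookkeeping lives in nova.compute.manager.InstanceEvents:

    import threading

    class InstanceEvents:
        # Toy pop-or-warn bookkeeping, not the actual nova class.
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare_for_event(self, instance_uuid, event_name):
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def pop_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    events = InstanceEvents()
    waiter = events.pop_event('instance-uuid', 'network-vif-plugged-port-uuid')
    if waiter is None:
        print('WARNING: received unexpected event')  # nothing was waiting yet
    else:
        waiter.set()  # unblock the thread waiting on this event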
[ 828.209900] env[61998]: DEBUG nova.network.neutron [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 828.328017] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Acquiring lock "refresh_cache-c55717f0-8ef2-4e55-b1cf-60f6faea9e5e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 828.328017] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Acquired lock "refresh_cache-c55717f0-8ef2-4e55-b1cf-60f6faea9e5e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 828.328017] env[61998]: DEBUG nova.network.neutron [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 828.509170] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453c4a22-c349-4a3d-823b-f4d5688846a5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 828.517259] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5778b3f-1068-4f66-b8b3-caec64fbbe78 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 828.551664] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62dedfde-4e64-4baa-9a8a-6c0dca0721cf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 828.560190] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfdd779-da0b-46f2-a501-3a1856ed0cf8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 828.576490] env[61998]: DEBUG nova.compute.provider_tree [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 828.713032] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Releasing lock "refresh_cache-a7225abb-d8ea-49fc-85da-7791d9dde5bc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 828.713610] env[61998]: DEBUG nova.compute.manager [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}}
[ 828.713883] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 828.714799] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a917c38-0430-454c-b313-86639c0f1132 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 828.722328] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 828.722603] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21efbd0a-dbf9-4cb3-8295-aa888908c8d9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 828.728174] env[61998]: DEBUG oslo_vmware.api [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 828.728174] env[61998]: value = "task-1388478"
[ 828.728174] env[61998]: _type = "Task"
[ 828.728174] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 828.735494] env[61998]: DEBUG oslo_vmware.api [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388478, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 828.876603] env[61998]: DEBUG nova.network.neutron [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 829.081020] env[61998]: DEBUG nova.scheduler.client.report [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 829.158464] env[61998]: DEBUG nova.network.neutron [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Updating instance_info_cache with network_info: [{"id": "4f851cfd-9ab8-4add-99ef-c7c946ce98b7", "address": "fa:16:3e:bd:9c:a8", "network": {"id": "6f328907-73b8-4fcf-8f72-2b8abc0561d2", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-29058134-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58de167acb584319ab41854e0641a962", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f851cfd-9a", "ovs_interfaceid": "4f851cfd-9ab8-4add-99ef-c7c946ce98b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 829.206617] env[61998]: DEBUG nova.compute.manager [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}}
[ 829.240591] env[61998]: DEBUG oslo_vmware.api [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388478, 'name': PowerOffVM_Task, 'duration_secs': 0.177214} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
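"Inventory has not changed" above means the report client compared the freshly computed inventory against its cached provider-tree copy and skipped the placement update. At heart this is a per-resource-class dict comparison; a minimal sketch with made-up numbers, not the report client's actual code:

    def inventory_changed(cached, new):
        # Only PUT to placement when the inventory dicts actually differ.
        return cached != new

    cached = {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0}}
    print(inventory_changed(cached, {'VCPU': dict(cached['VCPU'])}))  # False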
[ 829.242818] env[61998]: DEBUG nova.virt.hardware [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 829.243212] env[61998]: DEBUG nova.virt.hardware [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 829.243466] env[61998]: DEBUG nova.virt.hardware [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 829.243734] env[61998]: DEBUG nova.virt.hardware [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 829.243940] env[61998]: DEBUG nova.virt.hardware [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 829.244158] env[61998]: DEBUG nova.virt.hardware [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 829.244460] env[61998]: DEBUG nova.virt.hardware [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 829.244647] env[61998]: DEBUG nova.virt.hardware [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 829.244869] env[61998]: DEBUG nova.virt.hardware [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 829.245095] env[61998]: DEBUG nova.virt.hardware [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 829.245328] env[61998]: DEBUG nova.virt.hardware [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 829.245682] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 829.245902] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 829.246713] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8a5fec-8f51-46f7-9853-b0be2f1019be {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 829.249245] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e2328b6-b4cb-4adf-bf1d-b77deba21b1b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 829.256239] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab5f937-2f7c-4307-aca3-d60f2c1ee161 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 829.275813] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 829.276128] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 829.276334] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Deleting the datastore file [datastore2] a7225abb-d8ea-49fc-85da-7791d9dde5bc {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 829.276640] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-141b4a25-381b-4b9d-a436-a20dece32ec2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 829.284019] env[61998]: DEBUG oslo_vmware.api [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 829.284019] env[61998]: value = "task-1388480"
[ 829.284019] env[61998]: _type = "Task"
[ 829.284019] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 829.292182] env[61998]: DEBUG oslo_vmware.api [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388480, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 829.587202] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.406s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 829.587202] env[61998]: DEBUG nova.compute.manager [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}}
[ 829.588307] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.221s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 829.589846] env[61998]: INFO nova.compute.claims [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 829.666341] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Releasing lock "refresh_cache-c55717f0-8ef2-4e55-b1cf-60f6faea9e5e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 829.666341] env[61998]: DEBUG nova.compute.manager [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Instance network_info: |[{"id": "4f851cfd-9ab8-4add-99ef-c7c946ce98b7", "address": "fa:16:3e:bd:9c:a8", "network": {"id": "6f328907-73b8-4fcf-8f72-2b8abc0561d2", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-29058134-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58de167acb584319ab41854e0641a962", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f851cfd-9a", "ovs_interfaceid": "4f851cfd-9ab8-4add-99ef-c7c946ce98b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}}
[ 829.666517] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:9c:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '01fe2e08-46f6-4cee-aefd-934461f8077d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f851cfd-9ab8-4add-99ef-c7c946ce98b7', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 829.675434] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Creating folder: Project (58de167acb584319ab41854e0641a962). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
tempest-ServerPasswordTestJSON-114933406-project-member] Creating folder: Project (58de167acb584319ab41854e0641a962). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.677059] env[61998]: DEBUG nova.compute.manager [req-acca39ff-b0f4-4bb7-9506-e5f462987e33 req-457a9267-d900-4a54-aa6c-9de48e3e8490 service nova] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Received event network-changed-4f851cfd-9ab8-4add-99ef-c7c946ce98b7 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 829.678116] env[61998]: DEBUG nova.compute.manager [req-acca39ff-b0f4-4bb7-9506-e5f462987e33 req-457a9267-d900-4a54-aa6c-9de48e3e8490 service nova] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Refreshing instance network info cache due to event network-changed-4f851cfd-9ab8-4add-99ef-c7c946ce98b7. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 829.678116] env[61998]: DEBUG oslo_concurrency.lockutils [req-acca39ff-b0f4-4bb7-9506-e5f462987e33 req-457a9267-d900-4a54-aa6c-9de48e3e8490 service nova] Acquiring lock "refresh_cache-c55717f0-8ef2-4e55-b1cf-60f6faea9e5e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.678116] env[61998]: DEBUG oslo_concurrency.lockutils [req-acca39ff-b0f4-4bb7-9506-e5f462987e33 req-457a9267-d900-4a54-aa6c-9de48e3e8490 service nova] Acquired lock "refresh_cache-c55717f0-8ef2-4e55-b1cf-60f6faea9e5e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.678116] env[61998]: DEBUG nova.network.neutron [req-acca39ff-b0f4-4bb7-9506-e5f462987e33 req-457a9267-d900-4a54-aa6c-9de48e3e8490 service nova] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Refreshing network info cache for port 4f851cfd-9ab8-4add-99ef-c7c946ce98b7 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.679408] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cacdaf01-716e-4327-aee0-725dace72aba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.692806] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Created folder: Project (58de167acb584319ab41854e0641a962) in parent group-v294665. [ 829.693029] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Creating folder: Instances. Parent ref: group-v294694. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.693373] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df9d6215-82ea-44a0-b518-4fa6b1a25e3b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.702877] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Created folder: Instances in parent group-v294694. 
[ 829.703217] env[61998]: DEBUG oslo.service.loopingcall [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.703455] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 829.703677] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f47e7397-5b0d-48c9-a4e8-98d2c26d5b9e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.722452] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.722452] env[61998]: value = "task-1388483" [ 829.722452] env[61998]: _type = "Task" [ 829.722452] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.727222] env[61998]: DEBUG nova.compute.manager [req-fd0a471d-d0b1-4935-9885-e3196fdc2731 req-1f21aaff-d3f8-420f-83c0-21de0ed75984 service nova] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Received event network-vif-plugged-50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 829.727950] env[61998]: DEBUG oslo_concurrency.lockutils [req-fd0a471d-d0b1-4935-9885-e3196fdc2731 req-1f21aaff-d3f8-420f-83c0-21de0ed75984 service nova] Acquiring lock "b3a3bb81-843b-4227-bebf-a8079f98c0f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.727950] env[61998]: DEBUG oslo_concurrency.lockutils [req-fd0a471d-d0b1-4935-9885-e3196fdc2731 req-1f21aaff-d3f8-420f-83c0-21de0ed75984 service nova] Lock "b3a3bb81-843b-4227-bebf-a8079f98c0f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.727950] env[61998]: DEBUG oslo_concurrency.lockutils [req-fd0a471d-d0b1-4935-9885-e3196fdc2731 req-1f21aaff-d3f8-420f-83c0-21de0ed75984 service nova] Lock "b3a3bb81-843b-4227-bebf-a8079f98c0f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.728094] env[61998]: DEBUG nova.compute.manager [req-fd0a471d-d0b1-4935-9885-e3196fdc2731 req-1f21aaff-d3f8-420f-83c0-21de0ed75984 service nova] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] No waiting events found dispatching network-vif-plugged-50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 829.728389] env[61998]: WARNING nova.compute.manager [req-fd0a471d-d0b1-4935-9885-e3196fdc2731 req-1f21aaff-d3f8-420f-83c0-21de0ed75984 service nova] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Received unexpected event network-vif-plugged-50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7 for instance with vm_state building and task_state spawning. 
[ 829.733660] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388483, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.800116] env[61998]: DEBUG oslo_vmware.api [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388480, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088578} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.800228] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 829.800544] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 829.800893] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 829.801226] env[61998]: INFO nova.compute.manager [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Took 1.09 seconds to destroy the instance on the hypervisor. [ 829.801833] env[61998]: DEBUG oslo.service.loopingcall [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.802073] env[61998]: DEBUG nova.compute.manager [-] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 829.802173] env[61998]: DEBUG nova.network.neutron [-] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 829.821274] env[61998]: DEBUG nova.network.neutron [-] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Instance cache missing network info.
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.095037] env[61998]: DEBUG nova.compute.utils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 830.096435] env[61998]: DEBUG nova.compute.manager [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 830.096613] env[61998]: DEBUG nova.network.neutron [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 830.166842] env[61998]: DEBUG nova.policy [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7c2b0f1e1dd4b93862b0316ea6770a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5dc1064c95484fd4afd1de8243b72d55', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 830.218576] env[61998]: DEBUG nova.network.neutron [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Successfully updated port: 50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 830.234535] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388483, 'name': CreateVM_Task, 'duration_secs': 0.301541} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.234687] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 830.253658] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.253841] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.254191] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 830.254519] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daba6d45-6de6-4416-a3ad-b7d47e5b481a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.261253] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Waiting for the task: (returnval){ [ 830.261253] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]527f541e-1a63-ea9f-c3a5-dbb63080c149" [ 830.261253] env[61998]: _type = "Task" [ 830.261253] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.268951] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]527f541e-1a63-ea9f-c3a5-dbb63080c149, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.326399] env[61998]: DEBUG nova.compute.manager [req-70c8fcf8-e5af-4b5f-b133-df10107c38f2 req-d8ced59d-a4c4-4927-ba30-95ef7da0465d service nova] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Received event network-changed-50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 830.326681] env[61998]: DEBUG nova.compute.manager [req-70c8fcf8-e5af-4b5f-b133-df10107c38f2 req-d8ced59d-a4c4-4927-ba30-95ef7da0465d service nova] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Refreshing instance network info cache due to event network-changed-50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 830.326915] env[61998]: DEBUG oslo_concurrency.lockutils [req-70c8fcf8-e5af-4b5f-b133-df10107c38f2 req-d8ced59d-a4c4-4927-ba30-95ef7da0465d service nova] Acquiring lock "refresh_cache-b3a3bb81-843b-4227-bebf-a8079f98c0f8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.327043] env[61998]: DEBUG oslo_concurrency.lockutils [req-70c8fcf8-e5af-4b5f-b133-df10107c38f2 req-d8ced59d-a4c4-4927-ba30-95ef7da0465d service nova] Acquired lock "refresh_cache-b3a3bb81-843b-4227-bebf-a8079f98c0f8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.327203] env[61998]: DEBUG nova.network.neutron [req-70c8fcf8-e5af-4b5f-b133-df10107c38f2 req-d8ced59d-a4c4-4927-ba30-95ef7da0465d service nova] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Refreshing network info cache for port 50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 830.332079] env[61998]: DEBUG nova.network.neutron [-] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.606301] env[61998]: DEBUG nova.compute.manager [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 830.686996] env[61998]: DEBUG nova.network.neutron [req-acca39ff-b0f4-4bb7-9506-e5f462987e33 req-457a9267-d900-4a54-aa6c-9de48e3e8490 service nova] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Updated VIF entry in instance network info cache for port 4f851cfd-9ab8-4add-99ef-c7c946ce98b7. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 830.687359] env[61998]: DEBUG nova.network.neutron [req-acca39ff-b0f4-4bb7-9506-e5f462987e33 req-457a9267-d900-4a54-aa6c-9de48e3e8490 service nova] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Updating instance_info_cache with network_info: [{"id": "4f851cfd-9ab8-4add-99ef-c7c946ce98b7", "address": "fa:16:3e:bd:9c:a8", "network": {"id": "6f328907-73b8-4fcf-8f72-2b8abc0561d2", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-29058134-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58de167acb584319ab41854e0641a962", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f851cfd-9a", "ovs_interfaceid": "4f851cfd-9ab8-4add-99ef-c7c946ce98b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.727454] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "refresh_cache-b3a3bb81-843b-4227-bebf-a8079f98c0f8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.775051] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]527f541e-1a63-ea9f-c3a5-dbb63080c149, 'name': SearchDatastore_Task, 'duration_secs': 0.009828} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.775481] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.775830] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 830.776187] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.776677] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.777022] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.779681] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c67a709f-953b-46bf-8db4-10d17694def2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.836595] env[61998]: INFO nova.compute.manager [-] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Took 1.03 seconds to deallocate network for instance. [ 830.866938] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f39410-4d91-4d97-934a-e969e97abea9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.876413] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8f10bc-242c-49b6-a870-2d875a528411 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.882253] env[61998]: DEBUG nova.network.neutron [req-70c8fcf8-e5af-4b5f-b133-df10107c38f2 req-d8ced59d-a4c4-4927-ba30-95ef7da0465d service nova] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.914934] env[61998]: DEBUG nova.network.neutron [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Successfully created port: da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.917414] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06fe26eb-c757-49ec-881e-02735d80360b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.920043] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 830.920225] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 830.921361] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5618781-d628-4b59-b58c-1bf891d1bbd4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.932261] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4f2290-1fd0-45e7-b939-32ba324182e7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.936475] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Waiting for the task: (returnval){ [ 830.936475] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5238a569-32a9-646c-e768-33cef29d06ed" [ 830.936475] env[61998]: _type = "Task" [ 830.936475] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.952856] env[61998]: DEBUG nova.compute.provider_tree [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.960077] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5238a569-32a9-646c-e768-33cef29d06ed, 'name': SearchDatastore_Task, 'duration_secs': 0.009913} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.960370] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4a35f7b-8bec-41e3-ade6-0cdd1a49b4c4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.965949] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Waiting for the task: (returnval){ [ 830.965949] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52acc90c-b831-0006-6140-0f2db0694a54" [ 830.965949] env[61998]: _type = "Task" [ 830.965949] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.974249] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52acc90c-b831-0006-6140-0f2db0694a54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.982657] env[61998]: DEBUG nova.network.neutron [req-70c8fcf8-e5af-4b5f-b133-df10107c38f2 req-d8ced59d-a4c4-4927-ba30-95ef7da0465d service nova] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.189598] env[61998]: DEBUG oslo_concurrency.lockutils [req-acca39ff-b0f4-4bb7-9506-e5f462987e33 req-457a9267-d900-4a54-aa6c-9de48e3e8490 service nova] Releasing lock "refresh_cache-c55717f0-8ef2-4e55-b1cf-60f6faea9e5e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.344300] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.456390] env[61998]: DEBUG nova.scheduler.client.report [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 831.477445] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52acc90c-b831-0006-6140-0f2db0694a54, 'name': SearchDatastore_Task, 'duration_secs': 0.009199} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.477674] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.477910] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] c55717f0-8ef2-4e55-b1cf-60f6faea9e5e/c55717f0-8ef2-4e55-b1cf-60f6faea9e5e.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 831.479328] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2843188-c9c9-4918-8bc1-92304590548b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.484768] env[61998]: DEBUG oslo_concurrency.lockutils [req-70c8fcf8-e5af-4b5f-b133-df10107c38f2 req-d8ced59d-a4c4-4927-ba30-95ef7da0465d service nova] Releasing lock "refresh_cache-b3a3bb81-843b-4227-bebf-a8079f98c0f8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.485190] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Waiting for the task: (returnval){ [ 831.485190] env[61998]: value = "task-1388484" [ 831.485190] env[61998]: _type = "Task" [ 831.485190] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.485431] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "refresh_cache-b3a3bb81-843b-4227-bebf-a8079f98c0f8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.485583] env[61998]: DEBUG nova.network.neutron [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.494110] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388484, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.619480] env[61998]: DEBUG nova.compute.manager [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 831.651901] env[61998]: DEBUG nova.virt.hardware [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 831.651901] env[61998]: DEBUG nova.virt.hardware [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 831.651901] env[61998]: DEBUG nova.virt.hardware [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.652136] env[61998]: DEBUG nova.virt.hardware [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 831.652136] env[61998]: DEBUG nova.virt.hardware [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.652136] env[61998]: DEBUG nova.virt.hardware [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 831.652136] env[61998]: DEBUG nova.virt.hardware [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 831.652850] env[61998]: DEBUG nova.virt.hardware [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 831.653238] env[61998]: DEBUG nova.virt.hardware [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 831.653613] env[61998]: DEBUG nova.virt.hardware [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 831.654034] env[61998]: DEBUG nova.virt.hardware [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 831.655474] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74ecd3e-e515-4c02-ba56-2090d17b31d2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.665579] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a300b6-c376-4865-a90d-18e7fa7e348a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.964202] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.964202] env[61998]: DEBUG nova.compute.manager [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 831.967209] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.974s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.000463] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388484, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.032110] env[61998]: DEBUG nova.network.neutron [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.192288] env[61998]: DEBUG nova.network.neutron [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Updating instance_info_cache with network_info: [{"id": "50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7", "address": "fa:16:3e:bf:bf:0e", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50c8c3d1-4b", "ovs_interfaceid": "50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.477596] env[61998]: DEBUG nova.compute.utils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 832.480190] env[61998]: DEBUG nova.compute.manager [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 832.480363] env[61998]: DEBUG nova.network.neutron [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 832.507505] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388484, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516201} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.512547] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] c55717f0-8ef2-4e55-b1cf-60f6faea9e5e/c55717f0-8ef2-4e55-b1cf-60f6faea9e5e.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 832.512781] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 832.516978] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7687dc0-ac57-4c73-9b2b-1c480f7cdd75 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.521627] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Waiting for the task: (returnval){ [ 832.521627] env[61998]: value = "task-1388485" [ 832.521627] env[61998]: _type = "Task" [ 832.521627] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.535364] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388485, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.609443] env[61998]: DEBUG nova.compute.manager [req-6253fe03-a65e-4a1b-818d-7614f13128d3 req-7f97c81d-8206-4340-a56e-68c6d90edae8 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Received event network-vif-plugged-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 832.609703] env[61998]: DEBUG oslo_concurrency.lockutils [req-6253fe03-a65e-4a1b-818d-7614f13128d3 req-7f97c81d-8206-4340-a56e-68c6d90edae8 service nova] Acquiring lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.609914] env[61998]: DEBUG oslo_concurrency.lockutils [req-6253fe03-a65e-4a1b-818d-7614f13128d3 req-7f97c81d-8206-4340-a56e-68c6d90edae8 service nova] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.610200] env[61998]: DEBUG oslo_concurrency.lockutils [req-6253fe03-a65e-4a1b-818d-7614f13128d3 req-7f97c81d-8206-4340-a56e-68c6d90edae8 service nova] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.610388] env[61998]: DEBUG nova.compute.manager [req-6253fe03-a65e-4a1b-818d-7614f13128d3 req-7f97c81d-8206-4340-a56e-68c6d90edae8 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] No waiting events found dispatching network-vif-plugged-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 832.612028] env[61998]: WARNING nova.compute.manager [req-6253fe03-a65e-4a1b-818d-7614f13128d3 req-7f97c81d-8206-4340-a56e-68c6d90edae8 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Received unexpected event network-vif-plugged-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 for instance with vm_state building and task_state spawning.
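The long network_info structures logged above by update_instance_cache_with_nw_info are serialized VIF models, and the fields that matter when reading these logs (device name, MAC, fixed IPs, MTU, segmentation ID) sit several levels deep. A small self-contained snippet for dissecting one such blob; the literal below is abbreviated from the b3a3bb81 entry above, and real entries carry the full subnet/route/DNS detail.

# Pull the readable fields out of a logged network_info blob.
# The JSON literal is an abbreviated excerpt, not the full cache entry.
import json

blob = '''[{"id": "50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7",
            "address": "fa:16:3e:bf:bf:0e",
            "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1",
                        "bridge": "br-int",
                        "subnets": [{"cidr": "192.168.128.0/28",
                                     "ips": [{"address": "192.168.128.7"}]}],
                        "meta": {"mtu": 8950}},
            "type": "ovs",
            "details": {"segmentation_id": 544},
            "devname": "tap50c8c3d1-4b"}]'''

for vif in json.loads(blob):
    # Each VIF lists its subnets, and each subnet its fixed IPs.
    ips = [ip['address']
           for subnet in vif['network']['subnets']
           for ip in subnet['ips']]
    print(f"{vif['devname']} ({vif['type']}): mac={vif['address']} "
          f"ips={ips} mtu={vif['network']['meta']['mtu']} "
          f"segmentation_id={vif['details']['segmentation_id']}")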
[ 832.612522] env[61998]: DEBUG nova.policy [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25919d91b6fe4f31a85b4109149e261b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df09ba4531ae4b1e8e83f9b382b82c5c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 832.697959] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "refresh_cache-b3a3bb81-843b-4227-bebf-a8079f98c0f8" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.698436] env[61998]: DEBUG nova.compute.manager [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Instance network_info: |[{"id": "50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7", "address": "fa:16:3e:bf:bf:0e", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50c8c3d1-4b", "ovs_interfaceid": "50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 832.698733] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:bf:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.709154] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Creating 
folder: Project (f49104f21d7147328bcc8edee8d3cdb2). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 832.712150] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-214de3d7-61d3-4fca-8ca7-587204d43cd9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.721704] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Created folder: Project (f49104f21d7147328bcc8edee8d3cdb2) in parent group-v294665. [ 832.721887] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Creating folder: Instances. Parent ref: group-v294697. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 832.722133] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26f97d67-48f2-4ec0-bfc7-e4bf7d8839f4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.734042] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Created folder: Instances in parent group-v294697. [ 832.734042] env[61998]: DEBUG oslo.service.loopingcall [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 832.736980] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 832.737429] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4bb08f1-01a8-49b1-b881-caa6dcb6c4cf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.761062] env[61998]: DEBUG nova.network.neutron [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Successfully updated port: da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.766023] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.766023] env[61998]: value = "task-1388488" [ 832.766023] env[61998]: _type = "Task" [ 832.766023] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.777587] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388488, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.818954] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104611ff-891e-4f3f-b39b-94d7258d14d7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.826991] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76eb81b-c46c-4472-af75-dd803db423e0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.858832] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db24efdf-6317-45f2-bb27-d5a8875ed093 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.869030] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db37df9-d33f-4f5e-8437-5798022c4ff8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.881467] env[61998]: DEBUG nova.compute.provider_tree [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.986999] env[61998]: DEBUG nova.compute.manager [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 833.036905] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388485, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058604} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.037918] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 833.039747] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1c2545-4616-466e-8aca-3e7b89046584 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.062881] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] c55717f0-8ef2-4e55-b1cf-60f6faea9e5e/c55717f0-8ef2-4e55-b1cf-60f6faea9e5e.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 833.063570] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b621963-1ce2-4b23-bd91-313a88d3dd1a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.079649] env[61998]: DEBUG nova.network.neutron [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Successfully created port: 2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.088858] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Waiting for the task: (returnval){ [ 833.088858] env[61998]: value = "task-1388489" [ 833.088858] env[61998]: _type = "Task" [ 833.088858] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.100157] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388489, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.262867] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.263030] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.263199] env[61998]: DEBUG nova.network.neutron [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 833.277248] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388488, 'name': CreateVM_Task, 'duration_secs': 0.347173} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.278155] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.278946] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.279123] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.279870] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 833.280353] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-035b99ba-bf92-4861-8eab-4d51958e6da3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.286183] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 833.286183] env[61998]: value = 
"session[523a9ed6-b255-d82a-34e3-504b542807f6]524afd3a-9516-1a56-d39b-b360d334f0e6" [ 833.286183] env[61998]: _type = "Task" [ 833.286183] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.296866] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]524afd3a-9516-1a56-d39b-b360d334f0e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.384758] env[61998]: DEBUG nova.scheduler.client.report [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 833.599765] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388489, 'name': ReconfigVM_Task, 'duration_secs': 0.287561} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.600058] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Reconfigured VM instance instance-00000040 to attach disk [datastore1] c55717f0-8ef2-4e55-b1cf-60f6faea9e5e/c55717f0-8ef2-4e55-b1cf-60f6faea9e5e.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 833.600677] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b8f6650-33f1-41d8-8f04-d3b93521334e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.606798] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Waiting for the task: (returnval){ [ 833.606798] env[61998]: value = "task-1388490" [ 833.606798] env[61998]: _type = "Task" [ 833.606798] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.614687] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388490, 'name': Rename_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.727810] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Acquiring lock "1206c5c7-3eae-437b-9386-f3af937b8795" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.728066] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Lock "1206c5c7-3eae-437b-9386-f3af937b8795" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.798372] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]524afd3a-9516-1a56-d39b-b360d334f0e6, 'name': SearchDatastore_Task, 'duration_secs': 0.020554} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.798372] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.798372] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.798604] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.798641] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.799417] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
833.799417] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-967488fc-a026-41de-a695-9f1ce63c14ec {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.805648] env[61998]: DEBUG nova.network.neutron [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.808819] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.808997] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.810074] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8342f75-8408-4034-937c-b4deadafbe55 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.814483] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 833.814483] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52a3adc6-583f-e88b-1356-c384fcaa17d0" [ 833.814483] env[61998]: _type = "Task" [ 833.814483] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.829316] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52a3adc6-583f-e88b-1356-c384fcaa17d0, 'name': SearchDatastore_Task, 'duration_secs': 0.008417} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.830076] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-978d0614-12ae-4611-99d2-ec07e429c4bf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.834669] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 833.834669] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52b8738c-ee26-ee0c-7402-7f8d03c81b12" [ 833.834669] env[61998]: _type = "Task" [ 833.834669] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.843119] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b8738c-ee26-ee0c-7402-7f8d03c81b12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.893780] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.928s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.894440] env[61998]: ERROR nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 853b8636-f233-4b7c-a320-d00ec813dc5e, please check neutron logs for more information. [ 833.894440] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Traceback (most recent call last): [ 833.894440] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 833.894440] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] self.driver.spawn(context, instance, image_meta, [ 833.894440] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 833.894440] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] self._vmops.spawn(context, instance, image_meta, injected_files, [ 833.894440] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 833.894440] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] vm_ref = self.build_virtual_machine(instance, [ 833.894440] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 833.894440] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] vif_infos = vmwarevif.get_vif_info(self._session, [ 833.894440] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 833.894850] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] for vif in network_info: [ 833.894850] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 833.894850] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] return self._sync_wrapper(fn, *args, **kwargs) [ 833.894850] env[61998]: ERROR 
nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 833.894850] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] self.wait() [ 833.894850] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 833.894850] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] self[:] = self._gt.wait() [ 833.894850] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 833.894850] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] return self._exit_event.wait() [ 833.894850] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 833.894850] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] current.throw(*self._exc) [ 833.894850] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 833.894850] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] result = function(*args, **kwargs) [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] return func(*args, **kwargs) [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] raise e [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] nwinfo = self.network_api.allocate_for_instance( [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] created_port_ids = self._update_ports_for_instance( [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] with excutils.save_and_reraise_exception(): [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] self.force_reraise() [ 833.895182] env[61998]: ERROR nova.compute.manager [instance: 
b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.895531] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] raise self.value [ 833.895531] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 833.895531] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] updated_port = self._update_port( [ 833.895531] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.895531] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] _ensure_no_port_binding_failure(port) [ 833.895531] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 833.895531] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] raise exception.PortBindingFailed(port_id=port['id']) [ 833.895531] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] nova.exception.PortBindingFailed: Binding failed for port 853b8636-f233-4b7c-a320-d00ec813dc5e, please check neutron logs for more information. [ 833.895531] env[61998]: ERROR nova.compute.manager [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] [ 833.895531] env[61998]: DEBUG nova.compute.utils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Binding failed for port 853b8636-f233-4b7c-a320-d00ec813dc5e, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 833.896318] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.732s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.897743] env[61998]: INFO nova.compute.claims [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.900362] env[61998]: DEBUG nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Build of instance b3232fcd-43b2-4139-afe1-fbe863d0af30 was re-scheduled: Binding failed for port 853b8636-f233-4b7c-a320-d00ec813dc5e, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 833.900770] env[61998]: DEBUG nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 833.900987] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "refresh_cache-b3232fcd-43b2-4139-afe1-fbe863d0af30" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.901145] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "refresh_cache-b3232fcd-43b2-4139-afe1-fbe863d0af30" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.901301] env[61998]: DEBUG nova.network.neutron [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 833.981677] env[61998]: DEBUG nova.network.neutron [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updating instance_info_cache with network_info: [{"id": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "address": "fa:16:3e:95:c1:87", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2ff8f0-d7", "ovs_interfaceid": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.996864] env[61998]: DEBUG nova.compute.manager [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 834.020545] env[61998]: DEBUG nova.virt.hardware [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 834.020789] env[61998]: DEBUG nova.virt.hardware [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 834.020973] env[61998]: DEBUG nova.virt.hardware [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.021127] env[61998]: DEBUG nova.virt.hardware [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 834.021275] env[61998]: DEBUG nova.virt.hardware [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.021474] env[61998]: DEBUG nova.virt.hardware [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 834.021707] env[61998]: DEBUG nova.virt.hardware [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 834.021864] env[61998]: DEBUG nova.virt.hardware [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 834.022038] env[61998]: DEBUG 
nova.virt.hardware [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 834.022203] env[61998]: DEBUG nova.virt.hardware [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 834.022372] env[61998]: DEBUG nova.virt.hardware [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 834.023243] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86586b8-ed58-46fa-855d-f8bbb5212551 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.031509] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c0f047-41d7-4caa-b439-02153f6d0ed6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.119098] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388490, 'name': Rename_Task, 'duration_secs': 0.138982} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.119098] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 834.119098] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abfae333-e763-42f5-b8f1-348cb07039f3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.126017] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Waiting for the task: (returnval){ [ 834.126017] env[61998]: value = "task-1388491" [ 834.126017] env[61998]: _type = "Task" [ 834.126017] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.132554] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388491, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.345608] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b8738c-ee26-ee0c-7402-7f8d03c81b12, 'name': SearchDatastore_Task, 'duration_secs': 0.007586} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.345905] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.346176] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] b3a3bb81-843b-4227-bebf-a8079f98c0f8/b3a3bb81-843b-4227-bebf-a8079f98c0f8.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.346467] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33c27817-6437-4080-87c4-c53083833ac6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.353795] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 834.353795] env[61998]: value = "task-1388492" [ 834.353795] env[61998]: _type = "Task" [ 834.353795] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.361106] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388492, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.433299] env[61998]: DEBUG nova.network.neutron [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 834.484167] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.484448] env[61998]: DEBUG nova.compute.manager [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Instance network_info: |[{"id": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "address": "fa:16:3e:95:c1:87", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2ff8f0-d7", "ovs_interfaceid": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 834.484933] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:c1:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98e21102-8954-4f6f-b1e6-5d764a53aa22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.492946] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating folder: Project (5dc1064c95484fd4afd1de8243b72d55). Parent ref: group-v294665. 
{{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 834.493308] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a26062d-111b-4a35-bf0b-829852e8b3ab {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.504641] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Created folder: Project (5dc1064c95484fd4afd1de8243b72d55) in parent group-v294665. [ 834.504873] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating folder: Instances. Parent ref: group-v294700. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 834.505145] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3dbe618c-2f3d-4e9f-84a3-46012b07acda {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.514480] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Created folder: Instances in parent group-v294700. [ 834.514758] env[61998]: DEBUG oslo.service.loopingcall [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.514976] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 834.515218] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a371a26-ee0a-487e-8a79-591464967ebe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.536483] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.536483] env[61998]: value = "task-1388495" [ 834.536483] env[61998]: _type = "Task" [ 834.536483] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.544735] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388495, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.545926] env[61998]: DEBUG nova.network.neutron [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.641226] env[61998]: DEBUG oslo_vmware.api [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388491, 'name': PowerOnVM_Task, 'duration_secs': 0.458737} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.641663] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 834.641894] env[61998]: INFO nova.compute.manager [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Took 7.80 seconds to spawn the instance on the hypervisor. [ 834.642173] env[61998]: DEBUG nova.compute.manager [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 834.642938] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357b1fd7-e204-4bf8-b797-35b75ae5bda9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.728736] env[61998]: DEBUG nova.compute.manager [req-ef517a0f-0947-4308-a093-029bc655f84e req-b739d2d2-78cd-4436-81b7-bde4eba3036a service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Received event network-changed-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 834.729724] env[61998]: DEBUG nova.compute.manager [req-ef517a0f-0947-4308-a093-029bc655f84e req-b739d2d2-78cd-4436-81b7-bde4eba3036a service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Refreshing instance network info cache due to event network-changed-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 834.730248] env[61998]: DEBUG oslo_concurrency.lockutils [req-ef517a0f-0947-4308-a093-029bc655f84e req-b739d2d2-78cd-4436-81b7-bde4eba3036a service nova] Acquiring lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.731267] env[61998]: DEBUG oslo_concurrency.lockutils [req-ef517a0f-0947-4308-a093-029bc655f84e req-b739d2d2-78cd-4436-81b7-bde4eba3036a service nova] Acquired lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.731267] env[61998]: DEBUG nova.network.neutron [req-ef517a0f-0947-4308-a093-029bc655f84e req-b739d2d2-78cd-4436-81b7-bde4eba3036a service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Refreshing network info cache for port da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.778347] env[61998]: DEBUG nova.compute.manager [req-10eeeaa4-8fcd-4207-a4e4-91dc154daf60 req-bcfc059d-7ff6-4e4c-be5c-35cac588cf85 service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Received event network-vif-plugged-2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 834.778681] env[61998]: DEBUG oslo_concurrency.lockutils [req-10eeeaa4-8fcd-4207-a4e4-91dc154daf60 req-bcfc059d-7ff6-4e4c-be5c-35cac588cf85 service nova] Acquiring lock "dadd9985-bca3-4207-927f-9490e0ae3f10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.778890] env[61998]: DEBUG oslo_concurrency.lockutils [req-10eeeaa4-8fcd-4207-a4e4-91dc154daf60 req-bcfc059d-7ff6-4e4c-be5c-35cac588cf85 service nova] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.778956] env[61998]: DEBUG oslo_concurrency.lockutils [req-10eeeaa4-8fcd-4207-a4e4-91dc154daf60 req-bcfc059d-7ff6-4e4c-be5c-35cac588cf85 service nova] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.779143] env[61998]: DEBUG nova.compute.manager [req-10eeeaa4-8fcd-4207-a4e4-91dc154daf60 req-bcfc059d-7ff6-4e4c-be5c-35cac588cf85 service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] No waiting events found dispatching network-vif-plugged-2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 834.779315] env[61998]: WARNING nova.compute.manager [req-10eeeaa4-8fcd-4207-a4e4-91dc154daf60 req-bcfc059d-7ff6-4e4c-be5c-35cac588cf85 service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Received unexpected event network-vif-plugged-2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1 for instance with vm_state building and task_state spawning. 
[ 834.867347] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388492, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.943414] env[61998]: DEBUG nova.network.neutron [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Successfully updated port: 2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 835.050648] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "refresh_cache-b3232fcd-43b2-4139-afe1-fbe863d0af30" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.050990] env[61998]: DEBUG nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 835.051245] env[61998]: DEBUG nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 835.051372] env[61998]: DEBUG nova.network.neutron [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 835.052967] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388495, 'name': CreateVM_Task, 'duration_secs': 0.44561} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.055554] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 835.056775] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.057327] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.057844] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 835.058241] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-719d7298-75fc-4055-bc35-2da5c1b87f9c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.065573] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 835.065573] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52416ccd-bc21-ef5b-2812-9dfa1dce2ff6" [ 835.065573] env[61998]: _type = "Task" [ 835.065573] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.076035] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52416ccd-bc21-ef5b-2812-9dfa1dce2ff6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.076987] env[61998]: DEBUG nova.network.neutron [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.171487] env[61998]: INFO nova.compute.manager [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Took 27.37 seconds to build instance. 
[ 835.219642] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ec7ed2-b7fa-4840-9e7a-4b46f5bbd321 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.228193] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ddce37-bb74-4f9a-bf3d-dd056a75f0a3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.264100] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a058a9-a981-4f7a-b0da-6a8d6dc838a0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.272479] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d0d9e2-93ca-4ece-9804-336a67b15ada {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.286430] env[61998]: DEBUG nova.compute.provider_tree [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.365592] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388492, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.732297} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.365592] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] b3a3bb81-843b-4227-bebf-a8079f98c0f8/b3a3bb81-843b-4227-bebf-a8079f98c0f8.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.365830] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.365908] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b34ee8b1-41df-456c-9726-7ef0134a6b96 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.373917] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 835.373917] env[61998]: value = "task-1388496" [ 835.373917] env[61998]: _type = "Task" [ 835.373917] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.383281] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388496, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.449793] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "refresh_cache-dadd9985-bca3-4207-927f-9490e0ae3f10" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.449956] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquired lock "refresh_cache-dadd9985-bca3-4207-927f-9490e0ae3f10" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.450111] env[61998]: DEBUG nova.network.neutron [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 835.530118] env[61998]: DEBUG nova.network.neutron [req-ef517a0f-0947-4308-a093-029bc655f84e req-b739d2d2-78cd-4436-81b7-bde4eba3036a service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updated VIF entry in instance network info cache for port da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 835.530553] env[61998]: DEBUG nova.network.neutron [req-ef517a0f-0947-4308-a093-029bc655f84e req-b739d2d2-78cd-4436-81b7-bde4eba3036a service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updating instance_info_cache with network_info: [{"id": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "address": "fa:16:3e:95:c1:87", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2ff8f0-d7", "ovs_interfaceid": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.576637] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52416ccd-bc21-ef5b-2812-9dfa1dce2ff6, 'name': SearchDatastore_Task, 'duration_secs': 0.012777} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.576961] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.577331] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 835.577459] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.577607] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.577784] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.578063] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4729438d-c1c1-4210-82d6-8c650ecd6e11 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.581609] env[61998]: DEBUG nova.network.neutron [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.598407] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.598750] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 835.599917] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8ba86ac-24a2-49a1-ae22-ed3b4be48bfd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.608048] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 835.608048] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52db36cc-3262-3f37-44f9-87ed7a801bb8" [ 835.608048] env[61998]: _type = "Task" [ 835.608048] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.620703] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52db36cc-3262-3f37-44f9-87ed7a801bb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.674093] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3de70ad7-51c6-44ef-8600-37ba9be96366 tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Lock "c55717f0-8ef2-4e55-b1cf-60f6faea9e5e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.631s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.789717] env[61998]: DEBUG nova.scheduler.client.report [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 835.887432] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388496, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.42512} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.889296] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.894833] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0d7196-0df5-4ad0-93ba-c21966f5cca6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.932373] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] b3a3bb81-843b-4227-bebf-a8079f98c0f8/b3a3bb81-843b-4227-bebf-a8079f98c0f8.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.934076] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a660fe16-07cd-4ce1-944f-bb1a2fbae9b1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.963036] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 835.963036] env[61998]: value = "task-1388497" [ 835.963036] env[61998]: _type = "Task" [ 835.963036] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.971951] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388497, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.000108] env[61998]: DEBUG nova.network.neutron [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 836.033202] env[61998]: DEBUG oslo_concurrency.lockutils [req-ef517a0f-0947-4308-a093-029bc655f84e req-b739d2d2-78cd-4436-81b7-bde4eba3036a service nova] Releasing lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.084223] env[61998]: INFO nova.compute.manager [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: b3232fcd-43b2-4139-afe1-fbe863d0af30] Took 1.03 seconds to deallocate network for instance. 
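The entries above repeat two library idioms worth recognizing when reading this log: oslo.concurrency's named locks (the paired "Acquiring lock ... by ..." / Lock ... "released" ... held N.NNNs lines) and oslo.vmware's task polling, where invoke_api returns a vCenter Task moref and wait_for_task polls it (the "progress is N%" and 'duration_secs' lines) until it completes or raises. Below is a minimal sketch of both patterns, assuming oslo.concurrency and oslo.vmware are installed; the helper names and parameters are illustrative placeholders, not Nova's actual code.

    from oslo_concurrency import lockutils
    from oslo_vmware import api as vmware_api  # session type used throughout this log


    # The in-process named lock behind lines such as
    #   Lock "compute_resources" acquired by "...instance_claim" :: waited 0.000s
    # The held time is logged on release, exactly as in the entries above.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # hypothetical body; runs with the 'compute_resources' lock held,
        # one caller at a time within this process
        return instance_uuid


    # The wait_for_task idiom behind the SearchDatastore_Task /
    # CopyVirtualDisk_Task entries: invoke_api returns a Task moref and
    # wait_for_task polls it until it succeeds, raising on task error.
    def copy_disk(session, disk_manager, src_path, dst_path, datacenter):
        # hypothetical helper; session is a vmware_api.VMwareAPISession
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  disk_manager,
                                  sourceName=src_path,
                                  sourceDatacenter=datacenter,
                                  destName=dst_path,
                                  destDatacenter=datacenter)
        return session.wait_for_task(task)

Both helpers are sketches only; the real code paths these entries come from are the ones the log itself names, e.g. nova/virt/vmwareapi/vm_util.py for the disk copy and nova.compute.resource_tracker.ResourceTracker.instance_claim for the lock.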
[ 836.119314] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52db36cc-3262-3f37-44f9-87ed7a801bb8, 'name': SearchDatastore_Task, 'duration_secs': 0.050789} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.120133] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbadd3c1-0ca3-4c56-99a8-88131158b7ce {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.126439] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 836.126439] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52944ef4-a126-8afc-4392-bd4f47b77c6d" [ 836.126439] env[61998]: _type = "Task" [ 836.126439] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.135542] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52944ef4-a126-8afc-4392-bd4f47b77c6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.146756] env[61998]: DEBUG nova.network.neutron [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Updating instance_info_cache with network_info: [{"id": "2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1", "address": "fa:16:3e:6e:db:74", "network": {"id": "9dcc2d50-24ed-410c-9c13-3d8d140b5cee", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1965897712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df09ba4531ae4b1e8e83f9b382b82c5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", "external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb3f6d1-79", "ovs_interfaceid": "2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.177411] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: 
f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 836.295033] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.296028] env[61998]: DEBUG nova.compute.manager [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 836.299230] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.644s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.399399] env[61998]: DEBUG oslo_concurrency.lockutils [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Acquiring lock "c55717f0-8ef2-4e55-b1cf-60f6faea9e5e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.399680] env[61998]: DEBUG oslo_concurrency.lockutils [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Lock "c55717f0-8ef2-4e55-b1cf-60f6faea9e5e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.399975] env[61998]: DEBUG oslo_concurrency.lockutils [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Acquiring lock "c55717f0-8ef2-4e55-b1cf-60f6faea9e5e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.400199] env[61998]: DEBUG oslo_concurrency.lockutils [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Lock "c55717f0-8ef2-4e55-b1cf-60f6faea9e5e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.400371] env[61998]: DEBUG oslo_concurrency.lockutils [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Lock "c55717f0-8ef2-4e55-b1cf-60f6faea9e5e-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.402902] env[61998]: INFO nova.compute.manager [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Terminating instance [ 836.407835] env[61998]: DEBUG nova.compute.manager [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 836.408044] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 836.408935] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f060d27-3c03-4119-9648-f3ec864a92e7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.417080] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 836.417305] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-974ccb6b-c328-4757-ace4-af91a72da375 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.423116] env[61998]: DEBUG oslo_vmware.api [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Waiting for the task: (returnval){ [ 836.423116] env[61998]: value = "task-1388498" [ 836.423116] env[61998]: _type = "Task" [ 836.423116] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.430798] env[61998]: DEBUG oslo_vmware.api [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388498, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.472773] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388497, 'name': ReconfigVM_Task, 'duration_secs': 0.362711} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.473075] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Reconfigured VM instance instance-00000041 to attach disk [datastore1] b3a3bb81-843b-4227-bebf-a8079f98c0f8/b3a3bb81-843b-4227-bebf-a8079f98c0f8.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.473800] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e6b6900e-b373-4bfe-9c27-1b683c0fa39c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.479990] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 836.479990] env[61998]: value = "task-1388499" [ 836.479990] env[61998]: _type = "Task" [ 836.479990] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.488306] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388499, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.636936] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52944ef4-a126-8afc-4392-bd4f47b77c6d, 'name': SearchDatastore_Task, 'duration_secs': 0.014089} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.637210] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.637472] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 2d0b199f-e0f1-42e0-afb5-e08602aebf01/2d0b199f-e0f1-42e0-afb5-e08602aebf01.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 836.637725] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72fabec5-c1bf-435a-9a46-554b8fdbb147 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.644372] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 836.644372] env[61998]: value = "task-1388500" [ 836.644372] env[61998]: _type = "Task" [ 836.644372] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.649192] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Releasing lock "refresh_cache-dadd9985-bca3-4207-927f-9490e0ae3f10" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.649500] env[61998]: DEBUG nova.compute.manager [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Instance network_info: |[{"id": "2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1", "address": "fa:16:3e:6e:db:74", "network": {"id": "9dcc2d50-24ed-410c-9c13-3d8d140b5cee", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1965897712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df09ba4531ae4b1e8e83f9b382b82c5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", "external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb3f6d1-79", "ovs_interfaceid": "2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 836.652633] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:db:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e51ebca-e0f8-4b77-b155-4ff928eef130', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 836.661408] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Creating folder: Project (df09ba4531ae4b1e8e83f9b382b82c5c). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 836.661729] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.662240] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b8657cc-e961-4b4c-99d8-a985844ca1f0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.672944] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Created folder: Project (df09ba4531ae4b1e8e83f9b382b82c5c) in parent group-v294665. [ 836.673145] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Creating folder: Instances. Parent ref: group-v294703. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 836.673364] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-037d9639-a96c-4fb2-a1bd-596e79dcb198 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.684189] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Created folder: Instances in parent group-v294703. [ 836.684427] env[61998]: DEBUG oslo.service.loopingcall [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 836.686575] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 836.686989] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cce8182a-c5dc-44c7-9fbc-69180f535aca {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.703896] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.709205] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 836.709205] env[61998]: value = "task-1388503" [ 836.709205] env[61998]: _type = "Task" [ 836.709205] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.716631] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388503, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.805304] env[61998]: DEBUG nova.compute.utils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 836.810207] env[61998]: DEBUG nova.compute.manager [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 836.810207] env[61998]: DEBUG nova.network.neutron [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.870590] env[61998]: DEBUG nova.compute.manager [req-b5b0979f-97e6-4d1a-810c-a81acaac0dbe req-42fd5fd0-c0a8-4166-bd29-cae30db8e233 service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Received event network-changed-2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 836.870824] env[61998]: DEBUG nova.compute.manager [req-b5b0979f-97e6-4d1a-810c-a81acaac0dbe req-42fd5fd0-c0a8-4166-bd29-cae30db8e233 service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Refreshing instance network info cache due to event network-changed-2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 836.871019] env[61998]: DEBUG oslo_concurrency.lockutils [req-b5b0979f-97e6-4d1a-810c-a81acaac0dbe req-42fd5fd0-c0a8-4166-bd29-cae30db8e233 service nova] Acquiring lock "refresh_cache-dadd9985-bca3-4207-927f-9490e0ae3f10" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.871247] env[61998]: DEBUG oslo_concurrency.lockutils [req-b5b0979f-97e6-4d1a-810c-a81acaac0dbe req-42fd5fd0-c0a8-4166-bd29-cae30db8e233 service nova] Acquired lock "refresh_cache-dadd9985-bca3-4207-927f-9490e0ae3f10" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.871573] env[61998]: DEBUG nova.network.neutron [req-b5b0979f-97e6-4d1a-810c-a81acaac0dbe req-42fd5fd0-c0a8-4166-bd29-cae30db8e233 service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Refreshing network info cache for port 2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 836.906504] env[61998]: DEBUG nova.policy [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '45e5c7148ac343ee8674cf6747d7df0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '102883704d52434591e74440e02262fb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 836.936035] env[61998]: DEBUG oslo_vmware.api [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388498, 'name': PowerOffVM_Task, 'duration_secs': 0.197944} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.936191] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 836.936386] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 836.936674] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-618191f1-d87d-4e7d-86f4-10bd78c60224 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.995156] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388499, 'name': Rename_Task, 'duration_secs': 0.151657} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.997958] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.998917] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb7585a2-e567-4b04-87db-0bf9c81ff9b5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.006709] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 837.006709] env[61998]: value = "task-1388505" [ 837.006709] env[61998]: _type = "Task" [ 837.006709] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.017458] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388505, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.023788] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 837.023923] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 837.024125] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Deleting the datastore file [datastore1] c55717f0-8ef2-4e55-b1cf-60f6faea9e5e {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 837.024448] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2bc84b48-f972-45c2-9548-7df4038848dc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.032598] env[61998]: DEBUG oslo_vmware.api [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Waiting for the task: (returnval){ [ 837.032598] env[61998]: value = "task-1388506" [ 837.032598] env[61998]: _type = "Task" [ 837.032598] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.042553] env[61998]: DEBUG oslo_vmware.api [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388506, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.118977] env[61998]: INFO nova.scheduler.client.report [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleted allocations for instance b3232fcd-43b2-4139-afe1-fbe863d0af30 [ 837.146894] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b8bf15-b69d-4eaa-a34b-3fced984c7fc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.161957] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3518d9-02af-402b-b389-088939f46300 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.165833] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388500, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.201927] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bee29ac-6bd7-4a6f-89e2-483d49b6e294 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.209704] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07bcde0-d5ea-40f4-aba2-e751c0242f83 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.226803] env[61998]: DEBUG nova.compute.provider_tree [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.230470] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388503, 'name': CreateVM_Task, 'duration_secs': 0.504371} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.230827] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 837.231518] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.231680] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.231997] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 837.232387] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e0b7bc1-1238-4425-a9e1-cfd39860cdf0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.237142] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){ [ 837.237142] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]521d6940-07d5-b629-5491-00fe1e06807b" [ 837.237142] env[61998]: _type = "Task" [ 837.237142] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.245500] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]521d6940-07d5-b629-5491-00fe1e06807b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.265540] env[61998]: DEBUG nova.network.neutron [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Successfully created port: d4d0a8be-1992-48b5-b1ed-4cf17b442314 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.310090] env[61998]: DEBUG nova.compute.manager [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 837.520265] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388505, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.550986] env[61998]: DEBUG oslo_vmware.api [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Task: {'id': task-1388506, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288799} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.551284] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 837.551471] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 837.551647] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 837.551823] env[61998]: INFO nova.compute.manager [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 837.552082] env[61998]: DEBUG oslo.service.loopingcall [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.552678] env[61998]: DEBUG nova.compute.manager [-] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 837.552678] env[61998]: DEBUG nova.network.neutron [-] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.635290] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ed232131-9658-494d-b963-c0adf4cce622 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "b3232fcd-43b2-4139-afe1-fbe863d0af30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.319s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.656430] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388500, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523767} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.657025] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 2d0b199f-e0f1-42e0-afb5-e08602aebf01/2d0b199f-e0f1-42e0-afb5-e08602aebf01.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 837.657025] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 837.657221] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a9e51ea-b656-44f4-9c3e-b38644165be5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.665333] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 837.665333] env[61998]: value = "task-1388507" [ 837.665333] env[61998]: _type = "Task" [ 837.665333] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.676994] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388507, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.732271] env[61998]: DEBUG nova.scheduler.client.report [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 837.748319] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]521d6940-07d5-b629-5491-00fe1e06807b, 'name': SearchDatastore_Task, 'duration_secs': 0.009487} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.749067] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.749304] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 837.749543] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.749692] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.749872] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 837.750879] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccdd85d4-773c-4b13-8c7e-207d5560ac98 
{{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.762681] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 837.762939] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 837.763734] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35efaf3b-22f3-4aab-a0db-c04552d0138f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.771790] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){ [ 837.771790] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52a61e0d-3658-8d4b-42dd-a4e08711fb43" [ 837.771790] env[61998]: _type = "Task" [ 837.771790] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.780236] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52a61e0d-3658-8d4b-42dd-a4e08711fb43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.818054] env[61998]: DEBUG nova.network.neutron [req-b5b0979f-97e6-4d1a-810c-a81acaac0dbe req-42fd5fd0-c0a8-4166-bd29-cae30db8e233 service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Updated VIF entry in instance network info cache for port 2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 837.818403] env[61998]: DEBUG nova.network.neutron [req-b5b0979f-97e6-4d1a-810c-a81acaac0dbe req-42fd5fd0-c0a8-4166-bd29-cae30db8e233 service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Updating instance_info_cache with network_info: [{"id": "2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1", "address": "fa:16:3e:6e:db:74", "network": {"id": "9dcc2d50-24ed-410c-9c13-3d8d140b5cee", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1965897712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df09ba4531ae4b1e8e83f9b382b82c5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", "external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb3f6d1-79", "ovs_interfaceid": "2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.017172] env[61998]: DEBUG oslo_vmware.api [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388505, 'name': PowerOnVM_Task, 'duration_secs': 0.621083} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.017525] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.017696] env[61998]: INFO nova.compute.manager [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Took 8.81 seconds to spawn the instance on the hypervisor. [ 838.017882] env[61998]: DEBUG nova.compute.manager [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 838.018659] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a98f088-1ee3-414d-966d-a26dc6445838 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.138582] env[61998]: DEBUG nova.compute.manager [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 838.174523] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388507, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.256327} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.174831] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 838.175599] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7d8e37-1b54-4611-968f-e999aac1773e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.199125] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 2d0b199f-e0f1-42e0-afb5-e08602aebf01/2d0b199f-e0f1-42e0-afb5-e08602aebf01.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 838.199706] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30616d22-56b3-4a07-bf36-3d2469794615 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.220414] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 838.220414] env[61998]: value = "task-1388508" [ 838.220414] env[61998]: _type = "Task" [ 838.220414] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.229781] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388508, 'name': ReconfigVM_Task} progress is 6%. 
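The wait_for_task / "Task: ... progress is N%" pairs above and throughout this section are oslo.vmware's task-polling pattern: submit a vCenter task, then poll its info until it reaches a terminal state, logging progress along the way. A generic sketch of that loop under simplified assumptions; fetch_task_info is a stand-in for the PropertyCollector read of the task's info property, and the real _poll_task runs inside a loopingcall with richer error translation:

    import time

    def wait_for_task(fetch_task_info, interval=0.5):
        """Poll a task-info callable until the task succeeds or errors."""
        while True:
            info = fetch_task_info()  # -> {'state': ..., 'progress': ...}
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise RuntimeError("task failed: %s" % info.get("error"))
            print("progress is %s%%" % info.get("progress", 0))
            time.sleep(interval)

    # Toy usage: a task that reports 50% once, then succeeds.
    states = iter([{"state": "running", "progress": 50},
                   {"state": "success", "result": "ok"}])
    print(wait_for_task(lambda: next(states), interval=0))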
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.239133] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.940s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.239815] env[61998]: ERROR nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5685b696-3b99-4d26-8e53-1bcd8e90accb, please check neutron logs for more information. [ 838.239815] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Traceback (most recent call last): [ 838.239815] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 838.239815] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] self.driver.spawn(context, instance, image_meta, [ 838.239815] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 838.239815] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 838.239815] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 838.239815] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] vm_ref = self.build_virtual_machine(instance, [ 838.239815] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 838.239815] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] vif_infos = vmwarevif.get_vif_info(self._session, [ 838.239815] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] for vif in network_info: [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] return self._sync_wrapper(fn, *args, **kwargs) [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] self.wait() [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/model.py", line 637, in 
wait [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] self[:] = self._gt.wait() [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] return self._exit_event.wait() [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] result = hub.switch() [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 838.240553] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] return self.greenlet.switch() [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] result = function(*args, **kwargs) [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] return func(*args, **kwargs) [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] raise e [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] nwinfo = self.network_api.allocate_for_instance( [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] created_port_ids = self._update_ports_for_instance( [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] with excutils.save_and_reraise_exception(): [ 838.241195] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.241510] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] self.force_reraise() [ 838.241510] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.241510] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] raise self.value [ 838.241510] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 838.241510] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] updated_port = self._update_port( [ 838.241510] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 838.241510] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] _ensure_no_port_binding_failure(port) [ 838.241510] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 838.241510] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] raise exception.PortBindingFailed(port_id=port['id']) [ 838.241510] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] nova.exception.PortBindingFailed: Binding failed for port 5685b696-3b99-4d26-8e53-1bcd8e90accb, please check neutron logs for more information. [ 838.241510] env[61998]: ERROR nova.compute.manager [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] [ 838.241784] env[61998]: DEBUG nova.compute.utils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Binding failed for port 5685b696-3b99-4d26-8e53-1bcd8e90accb, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 838.241784] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.181s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.247315] env[61998]: DEBUG nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Build of instance 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf was re-scheduled: Binding failed for port 5685b696-3b99-4d26-8e53-1bcd8e90accb, please check neutron logs for more information. 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 838.247920] env[61998]: DEBUG nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 838.248267] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Acquiring lock "refresh_cache-55c6ecdc-0e84-4399-8f1b-307b1c69dcdf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.248469] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Acquired lock "refresh_cache-55c6ecdc-0e84-4399-8f1b-307b1c69dcdf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.248651] env[61998]: DEBUG nova.network.neutron [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 838.283394] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52a61e0d-3658-8d4b-42dd-a4e08711fb43, 'name': SearchDatastore_Task, 'duration_secs': 0.050687} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.284310] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18ad7197-c8fc-4cf2-b276-0f068e7cbe5c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.290038] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){ [ 838.290038] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52c4d80c-03fe-4f78-0031-71a82487cea6" [ 838.290038] env[61998]: _type = "Task" [ 838.290038] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.299224] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c4d80c-03fe-4f78-0031-71a82487cea6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.322384] env[61998]: DEBUG nova.compute.manager [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 838.324835] env[61998]: DEBUG oslo_concurrency.lockutils [req-b5b0979f-97e6-4d1a-810c-a81acaac0dbe req-42fd5fd0-c0a8-4166-bd29-cae30db8e233 service nova] Releasing lock "refresh_cache-dadd9985-bca3-4207-927f-9490e0ae3f10" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.343574] env[61998]: DEBUG nova.virt.hardware [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 838.343833] env[61998]: DEBUG nova.virt.hardware [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 838.343994] env[61998]: DEBUG nova.virt.hardware [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.344193] env[61998]: DEBUG nova.virt.hardware [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 838.344338] env[61998]: DEBUG nova.virt.hardware [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.344483] env[61998]: DEBUG nova.virt.hardware [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 838.344736] env[61998]: DEBUG nova.virt.hardware [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 838.344895] env[61998]: DEBUG nova.virt.hardware [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 838.345083] env[61998]: DEBUG nova.virt.hardware [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 838.345254] env[61998]: DEBUG nova.virt.hardware [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 838.345439] env[61998]: DEBUG nova.virt.hardware [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 838.346320] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6eec9eb-548b-4da4-958a-78b665b5938c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.348917] env[61998]: DEBUG nova.network.neutron [-] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.355869] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92b4894-ad73-4bcf-bfce-7b5501955819 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.535983] env[61998]: INFO nova.compute.manager [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Took 28.53 seconds to build instance. [ 838.662929] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.729759] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388508, 'name': ReconfigVM_Task, 'duration_secs': 0.457948} completed successfully. 
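The hardware.py lines above enumerate CPU topologies for the 1-vCPU m1.nano flavor: with flavor and image limits unset (0:0:0), the maxima default to 65536 each, and the only factorization of one vCPU is 1:1:1. A toy version of that enumeration; Nova's real _get_possible_cpu_topologies adds preference ordering and validity checks on top:

    import itertools

    # Enumerate (sockets, cores, threads) triples whose product equals the
    # vCPU count and which fit under the limits; 65536 is the default maximum
    # when flavor and image leave sockets/cores/threads unset, as logged above.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        for s, c, t in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the log
    print(list(possible_topologies(4)))  # all factorizations of 4 vCPUs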
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.735270] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 2d0b199f-e0f1-42e0-afb5-e08602aebf01/2d0b199f-e0f1-42e0-afb5-e08602aebf01.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.735270] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d19ea80-fcd8-404c-acee-dc0cc4ff251c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.740500] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 838.740500] env[61998]: value = "task-1388509" [ 838.740500] env[61998]: _type = "Task" [ 838.740500] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.757351] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388509, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.785947] env[61998]: DEBUG nova.network.neutron [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.800332] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c4d80c-03fe-4f78-0031-71a82487cea6, 'name': SearchDatastore_Task, 'duration_secs': 0.016241} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.800332] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.800556] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] dadd9985-bca3-4207-927f-9490e0ae3f10/dadd9985-bca3-4207-927f-9490e0ae3f10.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 838.800771] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-696be08d-baca-4f83-b95b-8c62a8f389fd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.809489] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){ [ 838.809489] env[61998]: value = "task-1388510" [ 838.809489] env[61998]: _type = "Task" [ 838.809489] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.817079] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388510, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.854571] env[61998]: INFO nova.compute.manager [-] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Took 1.30 seconds to deallocate network for instance. 
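The SearchDatastore_Task / CopyVirtualDisk_Task / ExtendVirtualDisk_Task sequence around here is Nova's datastore-backed image cache at work: look the image up under [datastore2] devstack-image-cache_base while holding a per-image lock, copy the cached vmdk next to the instance, then grow the root disk to the flavor size. A hedged orchestration sketch; cache_has, fetch_image, copy_virtual_disk and extend_virtual_disk are hypothetical stand-ins for the ds_util/vm_util calls:

    from oslo_concurrency import lockutils

    # Shape of the cache-then-copy flow in the surrounding records. The lock
    # mirrors the per-image cache lock whose release is logged above; sizes
    # are in KiB, cf. "Extending root virtual disk to 1048576" for root_gb=1.
    def spawn_root_disk(image_id, instance_uuid, datastore, root_gb,
                        cache_has, fetch_image, copy_virtual_disk,
                        extend_virtual_disk):
        cached = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
            datastore, image_id, image_id)
        with lockutils.lock(cached):
            if not cache_has(cached):          # SearchDatastore_Task
                fetch_image(image_id, cached)  # download once, reuse later
        target = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)
        copy_virtual_disk(cached, target)      # CopyVirtualDisk_Task
        extend_virtual_disk(target, root_gb * 1024 * 1024)  # ExtendVirtualDisk_Task
        return target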
[ 838.944069] env[61998]: DEBUG nova.network.neutron [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.022370] env[61998]: DEBUG nova.compute.manager [req-5ba0e095-799e-4957-a9f9-2b9b8af10120 req-9024604c-9dea-4658-9dbf-f54089f6eb25 service nova] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Received event network-vif-deleted-4f851cfd-9ab8-4add-99ef-c7c946ce98b7 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 839.038045] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a452c2c1-3c00-4e51-ac2b-fb2adca8f85b tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "b3a3bb81-843b-4227-bebf-a8079f98c0f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.480s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.081741] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3721fec6-6b21-466d-9b0d-0841267cdd82 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.090170] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374fa947-1023-4b9f-9274-accc308e26b0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.123746] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30e21df-0ca0-4bc5-b3e1-4ae5e09a0a8c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.133188] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0006a2d-d791-4c0b-9d05-c38adecd7497 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.148309] env[61998]: DEBUG nova.compute.provider_tree [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 839.167295] env[61998]: DEBUG nova.network.neutron [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Successfully updated port: d4d0a8be-1992-48b5-b1ed-4cf17b442314 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.253878] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388509, 'name': Rename_Task, 'duration_secs': 0.227524} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.254115] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 839.254391] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3bca041d-36e9-4b24-b176-838abf26e9bc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.262091] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 839.262091] env[61998]: value = "task-1388511" [ 839.262091] env[61998]: _type = "Task" [ 839.262091] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.271550] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388511, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.321113] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388510, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.362348] env[61998]: DEBUG oslo_concurrency.lockutils [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.446647] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Releasing lock "refresh_cache-55c6ecdc-0e84-4399-8f1b-307b1c69dcdf" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.447496] env[61998]: DEBUG nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 839.447496] env[61998]: DEBUG nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 839.447496] env[61998]: DEBUG nova.network.neutron [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 839.474641] env[61998]: DEBUG nova.network.neutron [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 839.513454] env[61998]: INFO nova.compute.manager [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Rebuilding instance [ 839.541054] env[61998]: DEBUG nova.compute.manager [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 839.564726] env[61998]: DEBUG nova.compute.manager [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 839.564726] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3bbd96-aa23-4669-a1dd-158681746c9d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.652622] env[61998]: DEBUG nova.scheduler.client.report [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 839.670015] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "refresh_cache-c84d15dc-0ef2-44e2-b579-104678a6bb07" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.670177] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquired lock "refresh_cache-c84d15dc-0ef2-44e2-b579-104678a6bb07" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.670325] env[61998]: DEBUG nova.network.neutron [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 839.777153] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388511, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.823305] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388510, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.724282} completed successfully. 
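The "Inventory has not changed" record above shows the per-resource-class inventory the resource tracker reports to Placement. Schedulable capacity for each class is (total - reserved) * allocation_ratio, which is why 48 physical VCPUs with a 4.0 ratio can back 192 vCPU allocations. A worked check of that arithmetic against the logged record:

    # Inventory as logged above, trimmed to the fields used here.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0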
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.823305] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] dadd9985-bca3-4207-927f-9490e0ae3f10/dadd9985-bca3-4207-927f-9490e0ae3f10.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 839.823305] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 839.823305] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49c6e402-fd1d-4137-8656-74811ca8a673 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.829576] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){ [ 839.829576] env[61998]: value = "task-1388512" [ 839.829576] env[61998]: _type = "Task" [ 839.829576] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.836770] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388512, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.977096] env[61998]: DEBUG nova.network.neutron [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.060654] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.156542] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "4c15a9f8-4dc2-48e1-a697-03298adb8527" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.157051] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "4c15a9f8-4dc2-48e1-a697-03298adb8527" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.157414] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.916s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.158080] env[61998]: ERROR nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 81dae91d-8920-4195-8367-94d44047f3eb, please check neutron logs for more information. 
[ 840.158080] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Traceback (most recent call last): [ 840.158080] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 840.158080] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] self.driver.spawn(context, instance, image_meta, [ 840.158080] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 548, in spawn [ 840.158080] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 840.158080] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 840.158080] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] vm_ref = self.build_virtual_machine(instance, [ 840.158080] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 840.158080] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] vif_infos = vmwarevif.get_vif_info(self._session, [ 840.158080] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] for vif in network_info: [ 840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] return self._sync_wrapper(fn, *args, **kwargs) [ 840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] self.wait() [ 840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] self[:] = self._gt.wait() [ 840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] return self._exit_event.wait() [ 840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] current.throw(*self._exc) [ 840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
840.158501] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] result = function(*args, **kwargs) [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] return func(*args, **kwargs) [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] raise e [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] nwinfo = self.network_api.allocate_for_instance( [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] created_port_ids = self._update_ports_for_instance( [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] with excutils.save_and_reraise_exception(): [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] self.force_reraise() [ 840.158829] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 840.159194] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] raise self.value [ 840.159194] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 840.159194] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] updated_port = self._update_port( [ 840.159194] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 840.159194] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] _ensure_no_port_binding_failure(port) [ 840.159194] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 840.159194] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] raise exception.PortBindingFailed(port_id=port['id']) [ 840.159194] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] nova.exception.PortBindingFailed: Binding failed for 
port 81dae91d-8920-4195-8367-94d44047f3eb, please check neutron logs for more information. [ 840.159194] env[61998]: ERROR nova.compute.manager [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] [ 840.159194] env[61998]: DEBUG nova.compute.utils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Binding failed for port 81dae91d-8920-4195-8367-94d44047f3eb, please check neutron logs for more information. {{(pid=61998) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 840.159841] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.704s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.161740] env[61998]: INFO nova.compute.claims [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 840.165622] env[61998]: DEBUG nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Build of instance 169437f2-fb18-4d5c-8d00-b82e9e5752d5 was re-scheduled: Binding failed for port 81dae91d-8920-4195-8367-94d44047f3eb, please check neutron logs for more information. 
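After a PortBindingFailed build failure like the two in this section, the compute manager does not abort outright: it records the fault, re-schedules the build, and cleans up the allocated networking, with VIF unplugging skipped when the driver does not implement it (see the "Virt driver does not provide unplug_vifs method" record earlier). A control-flow sketch of that path; all callables are hypothetical stand-ins for the manager/network-API methods, not Nova's code:

    class PortBindingFailed(Exception):
        pass

    # Shape of the failure path in the surrounding records: build fails,
    # VIF unplug is attempted (and skipped if unimplemented), networks are
    # deallocated, and the instance goes back to the scheduler.
    def build_with_reschedule(build, unplug_vifs, deallocate_network,
                              reschedule, instance):
        try:
            build(instance)
        except PortBindingFailed as exc:
            print("Failed to build and run instance: %s" % exc)
            try:
                unplug_vifs(instance)
            except NotImplementedError:
                # cf. "Virt driver does not provide unplug_vifs method"
                pass
            deallocate_network(instance)
            reschedule(instance)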
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}}
[ 840.167299] env[61998]: DEBUG nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Unplugging VIFs for instance {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}}
[ 840.167299] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Acquiring lock "refresh_cache-169437f2-fb18-4d5c-8d00-b82e9e5752d5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 840.167299] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Acquired lock "refresh_cache-169437f2-fb18-4d5c-8d00-b82e9e5752d5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 840.167299] env[61998]: DEBUG nova.network.neutron [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 840.210549] env[61998]: DEBUG nova.network.neutron [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 840.274048] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388511, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 840.340046] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388512, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065303} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 840.340503] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 840.341384] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371aa3cb-e1ea-4f0e-aa95-b32187be7ff2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 840.365399] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] dadd9985-bca3-4207-927f-9490e0ae3f10/dadd9985-bca3-4207-927f-9490e0ae3f10.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 840.366415] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1832b6d0-59b5-49da-b50d-932adcce51f1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 840.386814] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){
[ 840.386814] env[61998]: value = "task-1388513"
[ 840.386814] env[61998]: _type = "Task"
[ 840.386814] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 840.395075] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388513, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 840.422382] env[61998]: DEBUG nova.network.neutron [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Updating instance_info_cache with network_info: [{"id": "d4d0a8be-1992-48b5-b1ed-4cf17b442314", "address": "fa:16:3e:67:a1:97", "network": {"id": "ec2debd7-c78c-40f3-9bd5-7b5fd01dcf33", "bridge": "br-int", "label": "tempest-ImagesTestJSON-835911222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102883704d52434591e74440e02262fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4d0a8be-19", "ovs_interfaceid": "d4d0a8be-1992-48b5-b1ed-4cf17b442314", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 840.481557] env[61998]: INFO nova.compute.manager [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] [instance: 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf] Took 1.03 seconds to deallocate network for instance.
[ 840.576127] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 840.576467] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f84546ac-1915-4394-98e6-9ffb243d087b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 840.584132] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){
[ 840.584132] env[61998]: value = "task-1388514"
[ 840.584132] env[61998]: _type = "Task"
[ 840.584132] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 840.593639] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388514, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 840.692264] env[61998]: DEBUG nova.network.neutron [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 840.773109] env[61998]: DEBUG oslo_vmware.api [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388511, 'name': PowerOnVM_Task, 'duration_secs': 1.129539} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 840.773394] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 840.773660] env[61998]: INFO nova.compute.manager [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Took 9.15 seconds to spawn the instance on the hypervisor.
[ 840.773938] env[61998]: DEBUG nova.compute.manager [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}}
[ 840.774786] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68aa48e2-7b0c-4755-93b3-68589ef7748a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 840.801887] env[61998]: DEBUG nova.network.neutron [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 840.897764] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388513, 'name': ReconfigVM_Task, 'duration_secs': 0.275201} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 840.898152] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Reconfigured VM instance instance-00000043 to attach disk [datastore2] dadd9985-bca3-4207-927f-9490e0ae3f10/dadd9985-bca3-4207-927f-9490e0ae3f10.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 840.898763] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd6b1fdb-8077-4227-b372-ef0fb897bac7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 840.904999] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){
[ 840.904999] env[61998]: value = "task-1388515"
[ 840.904999] env[61998]: _type = "Task"
[ 840.904999] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 840.912542] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388515, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 840.925210] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Releasing lock "refresh_cache-c84d15dc-0ef2-44e2-b579-104678a6bb07" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 840.925550] env[61998]: DEBUG nova.compute.manager [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Instance network_info: |[{"id": "d4d0a8be-1992-48b5-b1ed-4cf17b442314", "address": "fa:16:3e:67:a1:97", "network": {"id": "ec2debd7-c78c-40f3-9bd5-7b5fd01dcf33", "bridge": "br-int", "label": "tempest-ImagesTestJSON-835911222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102883704d52434591e74440e02262fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4d0a8be-19", "ovs_interfaceid": "d4d0a8be-1992-48b5-b1ed-4cf17b442314", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}}
[ 840.925978] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:a1:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d377d75-3add-4a15-8691-74b2eb010924', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd4d0a8be-1992-48b5-b1ed-4cf17b442314', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 840.933834] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Creating folder: Project (102883704d52434591e74440e02262fb). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 840.934185] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b80a246-e4e5-4bd3-95d5-c88f51851c28 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 840.944308] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Created folder: Project (102883704d52434591e74440e02262fb) in parent group-v294665.
[ 840.944584] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Creating folder: Instances. Parent ref: group-v294706. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 840.944876] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af693338-154a-4804-a9ea-fc0dfb60d1d8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 840.954552] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Created folder: Instances in parent group-v294706.
[ 840.954798] env[61998]: DEBUG oslo.service.loopingcall [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 840.954997] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 840.955225] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59c85287-30fb-4eb1-bef4-e1be9f314c60 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 840.973897] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 840.973897] env[61998]: value = "task-1388518"
[ 840.973897] env[61998]: _type = "Task"
[ 840.973897] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 840.981253] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388518, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 841.074669] env[61998]: DEBUG nova.compute.manager [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Received event network-vif-plugged-d4d0a8be-1992-48b5-b1ed-4cf17b442314 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 841.074948] env[61998]: DEBUG oslo_concurrency.lockutils [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] Acquiring lock "c84d15dc-0ef2-44e2-b579-104678a6bb07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 841.075144] env[61998]: DEBUG oslo_concurrency.lockutils [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] Lock "c84d15dc-0ef2-44e2-b579-104678a6bb07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 841.075318] env[61998]: DEBUG oslo_concurrency.lockutils [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] Lock "c84d15dc-0ef2-44e2-b579-104678a6bb07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 841.075489] env[61998]: DEBUG nova.compute.manager [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] No waiting events found dispatching network-vif-plugged-d4d0a8be-1992-48b5-b1ed-4cf17b442314 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 841.075665] env[61998]: WARNING nova.compute.manager [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Received unexpected event network-vif-plugged-d4d0a8be-1992-48b5-b1ed-4cf17b442314 for instance with vm_state building and task_state spawning.
[ 841.075832] env[61998]: DEBUG nova.compute.manager [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Received event network-changed-d4d0a8be-1992-48b5-b1ed-4cf17b442314 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 841.076036] env[61998]: DEBUG nova.compute.manager [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Refreshing instance network info cache due to event network-changed-d4d0a8be-1992-48b5-b1ed-4cf17b442314. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}}
[ 841.076233] env[61998]: DEBUG oslo_concurrency.lockutils [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] Acquiring lock "refresh_cache-c84d15dc-0ef2-44e2-b579-104678a6bb07" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 841.076386] env[61998]: DEBUG oslo_concurrency.lockutils [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] Acquired lock "refresh_cache-c84d15dc-0ef2-44e2-b579-104678a6bb07" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 841.076553] env[61998]: DEBUG nova.network.neutron [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Refreshing network info cache for port d4d0a8be-1992-48b5-b1ed-4cf17b442314 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 841.093870] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388514, 'name': PowerOffVM_Task, 'duration_secs': 0.229172} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 841.094133] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 841.094364] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 841.095168] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888e33e3-75d1-46ec-903b-b6c8808d156f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.102059] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 841.102302] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f1f3cc7-9902-4d19-8e98-fe21e08afccb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.295230] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 841.295424] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 841.295468] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleting the datastore file [datastore1] b3a3bb81-843b-4227-bebf-a8079f98c0f8 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 841.295962] env[61998]: INFO nova.compute.manager [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Took 24.94 seconds to build instance.
[ 841.296757] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9631770-931a-4824-8d6c-da58d3c1b1be {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.305821] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Releasing lock "refresh_cache-169437f2-fb18-4d5c-8d00-b82e9e5752d5" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 841.306040] env[61998]: DEBUG nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61998) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}}
[ 841.306276] env[61998]: DEBUG nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 841.306461] env[61998]: DEBUG nova.network.neutron [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 841.309014] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){
[ 841.309014] env[61998]: value = "task-1388520"
[ 841.309014] env[61998]: _type = "Task"
[ 841.309014] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 841.318330] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388520, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 841.335713] env[61998]: DEBUG nova.network.neutron [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 841.414505] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388515, 'name': Rename_Task, 'duration_secs': 0.131292} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 841.416995] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 841.418369] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4940acf7-6e7c-457d-8a92-1f1f4f7fe8db {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.427587] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){
[ 841.427587] env[61998]: value = "task-1388521"
[ 841.427587] env[61998]: _type = "Task"
[ 841.427587] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 841.433043] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388521, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 841.452054] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b339ff-4665-4fdd-81d4-9c0be96eee1b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.459049] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77e3678-8553-4295-a07e-e53caf066f6a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.492210] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65cff145-1239-4121-9d91-a74ba781461c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.503367] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388518, 'name': CreateVM_Task, 'duration_secs': 0.354073} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 841.505412] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 841.505412] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 841.505412] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 841.505699] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 841.507675] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b345d2fa-6eb0-4fe6-80ff-5662ae04f85d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.511500] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a11803a8-30d3-4fd1-97fd-b9a7116c7ded {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.524158] env[61998]: DEBUG nova.compute.provider_tree [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 841.526700] env[61998]: INFO nova.scheduler.client.report [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Deleted allocations for instance 55c6ecdc-0e84-4399-8f1b-307b1c69dcdf
[ 841.532014] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){
[ 841.532014] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52d4248d-ab1b-d84c-f064-0db77219a1d4"
[ 841.532014] env[61998]: _type = "Task"
[ 841.532014] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 841.543918] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52d4248d-ab1b-d84c-f064-0db77219a1d4, 'name': SearchDatastore_Task, 'duration_secs': 0.008507} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 841.544220] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 841.544441] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 841.544707] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 841.544878] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 841.545073] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 841.546104] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd26d769-6767-4300-a6c9-aebf7fdc4b97 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.553710] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 841.554178] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 841.554868] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a3d6c10-4bab-400a-9537-0d3993f0b667 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.562034] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){
[ 841.562034] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]528578db-3c5b-5381-e7c4-7473b44ff3dc"
[ 841.562034] env[61998]: _type = "Task"
[ 841.562034] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 841.569341] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528578db-3c5b-5381-e7c4-7473b44ff3dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 841.801282] env[61998]: DEBUG oslo_concurrency.lockutils [None req-6f9fd8c6-07fb-4012-9ace-182266112e57 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.167s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 841.820134] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388520, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 841.839565] env[61998]: DEBUG nova.network.neutron [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 841.936123] env[61998]: DEBUG oslo_vmware.api [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388521, 'name': PowerOnVM_Task, 'duration_secs': 0.470837} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 841.936123] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 841.936218] env[61998]: INFO nova.compute.manager [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Took 7.94 seconds to spawn the instance on the hypervisor.
[ 841.938017] env[61998]: DEBUG nova.compute.manager [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}}
[ 841.938017] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0351d2-df51-431d-8891-60934f195ef8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 841.972482] env[61998]: DEBUG nova.network.neutron [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Updated VIF entry in instance network info cache for port d4d0a8be-1992-48b5-b1ed-4cf17b442314. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 841.972859] env[61998]: DEBUG nova.network.neutron [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Updating instance_info_cache with network_info: [{"id": "d4d0a8be-1992-48b5-b1ed-4cf17b442314", "address": "fa:16:3e:67:a1:97", "network": {"id": "ec2debd7-c78c-40f3-9bd5-7b5fd01dcf33", "bridge": "br-int", "label": "tempest-ImagesTestJSON-835911222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102883704d52434591e74440e02262fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4d0a8be-19", "ovs_interfaceid": "d4d0a8be-1992-48b5-b1ed-4cf17b442314", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 842.033357] env[61998]: DEBUG nova.scheduler.client.report [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 842.037334] env[61998]: DEBUG oslo_concurrency.lockutils [None req-281fab31-c22c-4fc2-9095-666a02f118a7 tempest-FloatingIPsAssociationTestJSON-2091197507 tempest-FloatingIPsAssociationTestJSON-2091197507-project-member] Lock "55c6ecdc-0e84-4399-8f1b-307b1c69dcdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.524s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 842.072022] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528578db-3c5b-5381-e7c4-7473b44ff3dc, 'name': SearchDatastore_Task, 'duration_secs': 0.008499} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 842.072790] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53c2d98c-a2ce-4dd2-850b-ca5d7d2799c8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 842.077646] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){
[ 842.077646] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52aa6573-bfa6-67bd-e8ed-55ea6b5c12ee"
[ 842.077646] env[61998]: _type = "Task"
[ 842.077646] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 842.085596] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52aa6573-bfa6-67bd-e8ed-55ea6b5c12ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 842.303266] env[61998]: DEBUG nova.compute.manager [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}}
[ 842.320268] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388520, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 842.342806] env[61998]: INFO nova.compute.manager [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] [instance: 169437f2-fb18-4d5c-8d00-b82e9e5752d5] Took 1.04 seconds to deallocate network for instance.
[ 842.454832] env[61998]: INFO nova.compute.manager [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Took 25.11 seconds to build instance.
[ 842.476059] env[61998]: DEBUG oslo_concurrency.lockutils [req-549443c9-7749-4fe9-95e2-731077dea0c3 req-5aeb2e79-641f-4e97-bf68-d9cac80eb5fb service nova] Releasing lock "refresh_cache-c84d15dc-0ef2-44e2-b579-104678a6bb07" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 842.538615] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 842.539162] env[61998]: DEBUG nova.compute.manager [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}}
[ 842.543017] env[61998]: DEBUG oslo_concurrency.lockutils [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.426s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 842.543017] env[61998]: DEBUG nova.objects.instance [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61998) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}}
[ 842.544664] env[61998]: DEBUG nova.compute.manager [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}}
[ 842.592347] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52aa6573-bfa6-67bd-e8ed-55ea6b5c12ee, 'name': SearchDatastore_Task, 'duration_secs': 0.009338} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 842.592597] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 842.592886] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] c84d15dc-0ef2-44e2-b579-104678a6bb07/c84d15dc-0ef2-44e2-b579-104678a6bb07.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 842.593156] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f56ad08d-c26e-496e-a3cb-0cd132ec052f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 842.601372] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){
[ 842.601372] env[61998]: value = "task-1388522"
[ 842.601372] env[61998]: _type = "Task"
[ 842.601372] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 842.612768] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388522, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 842.825528] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388520, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 842.838295] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 842.957900] env[61998]: DEBUG oslo_concurrency.lockutils [None req-345d3a46-5e7e-4fdf-94bf-ed647c1abdf3 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.604s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 843.054304] env[61998]: DEBUG nova.compute.utils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 843.061461] env[61998]: DEBUG nova.compute.manager [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 843.061461] env[61998]: DEBUG nova.network.neutron [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 843.092240] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 843.112709] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388522, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505298} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 843.113046] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] c84d15dc-0ef2-44e2-b579-104678a6bb07/c84d15dc-0ef2-44e2-b579-104678a6bb07.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 843.113306] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 843.113592] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-acd21e63-0072-4318-8e18-ee877e5e175d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 843.118789] env[61998]: DEBUG nova.compute.manager [req-5ae0b429-2b87-4f9e-845e-8e4da969d3f1 req-24d6a097-ca0a-4ff7-a819-5548be681a60 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Received event network-changed-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 843.119027] env[61998]: DEBUG nova.compute.manager [req-5ae0b429-2b87-4f9e-845e-8e4da969d3f1 req-24d6a097-ca0a-4ff7-a819-5548be681a60 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Refreshing instance network info cache due to event network-changed-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}}
[ 843.119271] env[61998]: DEBUG oslo_concurrency.lockutils [req-5ae0b429-2b87-4f9e-845e-8e4da969d3f1 req-24d6a097-ca0a-4ff7-a819-5548be681a60 service nova] Acquiring lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 843.119684] env[61998]: DEBUG oslo_concurrency.lockutils [req-5ae0b429-2b87-4f9e-845e-8e4da969d3f1 req-24d6a097-ca0a-4ff7-a819-5548be681a60 service nova] Acquired lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 843.119900] env[61998]: DEBUG nova.network.neutron [req-5ae0b429-2b87-4f9e-845e-8e4da969d3f1 req-24d6a097-ca0a-4ff7-a819-5548be681a60 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Refreshing network info cache for port da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 843.123398] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){
[ 843.123398] env[61998]: value = "task-1388523"
[ 843.123398] env[61998]: _type = "Task"
[ 843.123398] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 843.132529] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388523, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 843.169013] env[61998]: DEBUG nova.policy [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3066202e35b643d1b6d3f2d8b4d724ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e479b6ac56f464fbc86574f776cd96c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}}
[ 843.324842] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388520, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.85311} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 843.325268] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 843.325417] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 843.325652] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 843.384498] env[61998]: INFO nova.scheduler.client.report [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Deleted allocations for instance 169437f2-fb18-4d5c-8d00-b82e9e5752d5
[ 843.460654] env[61998]: DEBUG nova.compute.manager [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}}
[ 843.557829] env[61998]: DEBUG oslo_concurrency.lockutils [None req-57991593-1c12-433e-ae19-888531e6e40c tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 843.561622] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.323s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 843.563254] env[61998]: INFO nova.compute.claims [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 843.566918] env[61998]: DEBUG nova.compute.manager [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}}
[ 843.640445] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388523, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062392} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 843.641100] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 843.642015] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19ed4dc-0b35-4cc9-8879-ad67e31fc1ca {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 843.666487] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] c84d15dc-0ef2-44e2-b579-104678a6bb07/c84d15dc-0ef2-44e2-b579-104678a6bb07.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 843.666487] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23e7e538-50be-416e-8653-abc1a57975e0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 843.690717] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){
[ 843.690717] env[61998]: value = "task-1388524"
[ 843.690717] env[61998]: _type = "Task"
[ 843.690717] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 843.700510] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388524, 'name': ReconfigVM_Task} progress is 6%.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.746027] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 843.746415] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 843.898001] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9d520bb2-a697-47c5-9f25-83b74e6ec079 tempest-ServerActionsTestOtherA-456391608 tempest-ServerActionsTestOtherA-456391608-project-member] Lock "169437f2-fb18-4d5c-8d00-b82e9e5752d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.527s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.989961] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.009024] env[61998]: DEBUG nova.network.neutron [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Successfully created port: e9f140c3-2474-433a-acc9-85eb29ac21cc {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 844.197061] env[61998]: DEBUG nova.network.neutron [req-5ae0b429-2b87-4f9e-845e-8e4da969d3f1 req-24d6a097-ca0a-4ff7-a819-5548be681a60 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updated VIF entry in instance network info cache for port da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 844.197417] env[61998]: DEBUG nova.network.neutron [req-5ae0b429-2b87-4f9e-845e-8e4da969d3f1 req-24d6a097-ca0a-4ff7-a819-5548be681a60 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updating instance_info_cache with network_info: [{"id": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "address": "fa:16:3e:95:c1:87", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2ff8f0-d7", "ovs_interfaceid": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.205935] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388524, 'name': ReconfigVM_Task, 'duration_secs': 0.317105} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.206292] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Reconfigured VM instance instance-00000044 to attach disk [datastore2] c84d15dc-0ef2-44e2-b579-104678a6bb07/c84d15dc-0ef2-44e2-b579-104678a6bb07.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.206947] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-acfc94ca-aa97-4e34-bb2d-7d4e2cc295ad {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.213594] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){ [ 844.213594] env[61998]: value = "task-1388525" [ 844.213594] env[61998]: _type = "Task" [ 844.213594] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.222845] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388525, 'name': Rename_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.255856] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 844.256036] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Starting heal instance info cache {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10227}} [ 844.256157] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Rebuilding the list of instances to heal {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10231}} [ 844.369758] env[61998]: DEBUG nova.virt.hardware [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 844.370093] env[61998]: DEBUG nova.virt.hardware [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 844.370213] env[61998]: DEBUG nova.virt.hardware [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.370394] env[61998]: DEBUG nova.virt.hardware [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 844.370538] env[61998]: DEBUG nova.virt.hardware [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.370681] env[61998]: DEBUG nova.virt.hardware [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 844.370889] env[61998]: DEBUG nova.virt.hardware [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 844.371734] env[61998]: DEBUG nova.virt.hardware [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 844.371938] env[61998]: DEBUG nova.virt.hardware [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 844.372606] env[61998]: DEBUG nova.virt.hardware [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 844.372606] env[61998]: DEBUG nova.virt.hardware [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 844.373399] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a6c0a3-ea79-4189-b159-d1d87c221c6d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.385875] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6beaf102-a086-4e76-a836-00db3cb66615 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.401209] env[61998]: DEBUG nova.compute.manager [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 844.404015] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:bf:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.413674] env[61998]: DEBUG oslo.service.loopingcall [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.413887] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.414122] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cbaa71ef-5733-4517-b24d-d4d3d4e9047a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.435328] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.435328] env[61998]: value = "task-1388526" [ 844.435328] env[61998]: _type = "Task" [ 844.435328] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.445559] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388526, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.587781] env[61998]: DEBUG nova.compute.manager [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 844.611420] env[61998]: DEBUG nova.virt.hardware [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 844.611640] env[61998]: DEBUG nova.virt.hardware [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 844.611872] env[61998]: DEBUG nova.virt.hardware [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.612105] env[61998]: DEBUG nova.virt.hardware [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 844.612274] env[61998]: DEBUG nova.virt.hardware [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.612460] env[61998]: DEBUG nova.virt.hardware [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 844.612739] env[61998]: DEBUG nova.virt.hardware [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 844.612945] env[61998]: DEBUG nova.virt.hardware [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 844.613183] 
env[61998]: DEBUG nova.virt.hardware [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 844.613463] env[61998]: DEBUG nova.virt.hardware [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 844.613705] env[61998]: DEBUG nova.virt.hardware [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 844.614707] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98640bd9-ec68-4cc9-bf58-717881db755c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.624227] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d71bb14-a6f3-4614-90c7-a0f1aeaca065 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.703114] env[61998]: DEBUG oslo_concurrency.lockutils [req-5ae0b429-2b87-4f9e-845e-8e4da969d3f1 req-24d6a097-ca0a-4ff7-a819-5548be681a60 service nova] Releasing lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.724661] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388525, 'name': Rename_Task, 'duration_secs': 0.150747} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.724939] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.725234] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4344dbb6-d430-46bc-8072-b5776ecdffaa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.734121] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){ [ 844.734121] env[61998]: value = "task-1388527" [ 844.734121] env[61998]: _type = "Task" [ 844.734121] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.742992] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388527, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.759960] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}} [ 844.760146] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}} [ 844.760286] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}} [ 844.777526] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "refresh_cache-5eb786f1-7789-48a0-a04e-a4039e387f58" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.777686] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquired lock "refresh_cache-5eb786f1-7789-48a0-a04e-a4039e387f58" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.777955] env[61998]: DEBUG nova.network.neutron [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Forcefully refreshing network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 844.778014] env[61998]: DEBUG nova.objects.instance [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lazy-loading 'info_cache' on Instance uuid 5eb786f1-7789-48a0-a04e-a4039e387f58 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.822409] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba99c9f8-dbd2-4d09-8738-27c274dee3e6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.830450] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ea6f65-2a28-4511-a98f-1dca0497eab7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.863227] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ece600-934e-4847-9070-5d7a2b64895e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.870528] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3e4ae3-dd8b-41cd-89cb-36050ff89585 {{(pid=61998) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.883493] env[61998]: DEBUG nova.compute.provider_tree [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.933111] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.946658] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388526, 'name': CreateVM_Task, 'duration_secs': 0.359804} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.946866] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 844.947724] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.947931] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.948349] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 844.948651] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b578d342-8a88-4a5f-a6c6-38f32e0dad28 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.953695] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 844.953695] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]524f556c-3905-0503-feed-1df6570d1341" [ 844.953695] env[61998]: _type = "Task" [ 844.953695] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.962483] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]524f556c-3905-0503-feed-1df6570d1341, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.151596] env[61998]: DEBUG nova.compute.manager [req-05133177-4d5a-42e0-bd84-297776ca0ca7 req-495eff62-87f7-43da-bf93-2854a9d181fe service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Received event network-changed-2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 845.151835] env[61998]: DEBUG nova.compute.manager [req-05133177-4d5a-42e0-bd84-297776ca0ca7 req-495eff62-87f7-43da-bf93-2854a9d181fe service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Refreshing instance network info cache due to event network-changed-2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 845.152065] env[61998]: DEBUG oslo_concurrency.lockutils [req-05133177-4d5a-42e0-bd84-297776ca0ca7 req-495eff62-87f7-43da-bf93-2854a9d181fe service nova] Acquiring lock "refresh_cache-dadd9985-bca3-4207-927f-9490e0ae3f10" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.152210] env[61998]: DEBUG oslo_concurrency.lockutils [req-05133177-4d5a-42e0-bd84-297776ca0ca7 req-495eff62-87f7-43da-bf93-2854a9d181fe service nova] Acquired lock "refresh_cache-dadd9985-bca3-4207-927f-9490e0ae3f10" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.152370] env[61998]: DEBUG nova.network.neutron [req-05133177-4d5a-42e0-bd84-297776ca0ca7 req-495eff62-87f7-43da-bf93-2854a9d181fe service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Refreshing network info cache for port 2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.244505] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388527, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.386552] env[61998]: DEBUG nova.scheduler.client.report [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 845.464620] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]524f556c-3905-0503-feed-1df6570d1341, 'name': SearchDatastore_Task, 'duration_secs': 0.013087} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.464963] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.465222] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.465449] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.465637] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.465873] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.466144] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d2c96fa-353a-49fe-a232-5a2598bb1ccb {{(pid=61998) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.474840] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.475051] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 845.475758] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f85348f-7419-40a9-9fe3-8046b4cc19d1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.481077] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 845.481077] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]528f768f-37db-7ca7-f2d3-7999a7efda4c" [ 845.481077] env[61998]: _type = "Task" [ 845.481077] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.488791] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528f768f-37db-7ca7-f2d3-7999a7efda4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.748598] env[61998]: DEBUG oslo_vmware.api [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388527, 'name': PowerOnVM_Task, 'duration_secs': 0.566519} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.749217] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.749592] env[61998]: INFO nova.compute.manager [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Took 7.43 seconds to spawn the instance on the hypervisor. 
[ 845.749887] env[61998]: DEBUG nova.compute.manager [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 845.750915] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ac6c55-bb1b-46d3-a191-cd783b7215fa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.805789] env[61998]: DEBUG nova.network.neutron [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 845.828403] env[61998]: DEBUG nova.network.neutron [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Successfully updated port: e9f140c3-2474-433a-acc9-85eb29ac21cc {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 845.895310] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.336s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.895310] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 845.897852] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.554s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.898084] env[61998]: DEBUG nova.objects.instance [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lazy-loading 'resources' on Instance uuid a7225abb-d8ea-49fc-85da-7791d9dde5bc {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.986653] env[61998]: DEBUG nova.network.neutron [req-05133177-4d5a-42e0-bd84-297776ca0ca7 req-495eff62-87f7-43da-bf93-2854a9d181fe service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Updated VIF entry in instance network info cache for port 2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 845.987206] env[61998]: DEBUG nova.network.neutron [req-05133177-4d5a-42e0-bd84-297776ca0ca7 req-495eff62-87f7-43da-bf93-2854a9d181fe service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Updating instance_info_cache with network_info: [{"id": "2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1", "address": "fa:16:3e:6e:db:74", "network": {"id": "9dcc2d50-24ed-410c-9c13-3d8d140b5cee", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1965897712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df09ba4531ae4b1e8e83f9b382b82c5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", "external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb3f6d1-79", "ovs_interfaceid": "2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.995307] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528f768f-37db-7ca7-f2d3-7999a7efda4c, 'name': SearchDatastore_Task, 'duration_secs': 0.045802} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.996123] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a46d3b65-f32a-48b6-92d9-4d745893f0a8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.001529] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 846.001529] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]529f53d6-b705-e5b1-08e0-414f7b77eeee" [ 846.001529] env[61998]: _type = "Task" [ 846.001529] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.010592] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529f53d6-b705-e5b1-08e0-414f7b77eeee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.270466] env[61998]: INFO nova.compute.manager [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Took 27.12 seconds to build instance. [ 846.334507] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.334789] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.334982] env[61998]: DEBUG nova.network.neutron [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 846.373065] env[61998]: DEBUG nova.network.neutron [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.403261] env[61998]: DEBUG nova.compute.utils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 846.407724] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 846.407866] env[61998]: DEBUG nova.network.neutron [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 846.449447] env[61998]: DEBUG nova.policy [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5f2c10ceb02461095c74c83d4a745d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e4e8f3e000f4c2383b3a39f24499577', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 846.490820] env[61998]: DEBUG oslo_concurrency.lockutils [req-05133177-4d5a-42e0-bd84-297776ca0ca7 req-495eff62-87f7-43da-bf93-2854a9d181fe service nova] Releasing lock "refresh_cache-dadd9985-bca3-4207-927f-9490e0ae3f10" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.515136] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529f53d6-b705-e5b1-08e0-414f7b77eeee, 'name': SearchDatastore_Task, 'duration_secs': 0.01745} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.518725] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.518725] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] b3a3bb81-843b-4227-bebf-a8079f98c0f8/b3a3bb81-843b-4227-bebf-a8079f98c0f8.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 846.518725] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8721cf84-a1e0-49b7-a353-265cc21fd6f5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.527997] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 846.527997] env[61998]: value = "task-1388528" [ 846.527997] env[61998]: _type = "Task" [ 846.527997] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.539637] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388528, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.664337] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76ac25c-41a6-42c2-9ddd-4e29c3bc8a1f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.671816] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae950ba-2f3a-42b1-b67d-18d1b9b7827f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.709482] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50729ad0-6c35-4a36-bf58-3027ccfc0d13 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.717393] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6c3d37-a423-452b-83a3-2bb682a65414 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.731466] env[61998]: DEBUG nova.compute.provider_tree [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.773067] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f1c8a928-d422-435b-b8f6-cc001ad10541 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "c84d15dc-0ef2-44e2-b579-104678a6bb07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.291s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.779488] env[61998]: DEBUG nova.network.neutron [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Successfully created port: a6cc2f5c-f8f0-4800-9361-1ecc4455015b {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.875316] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Releasing lock "refresh_cache-5eb786f1-7789-48a0-a04e-a4039e387f58" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.875549] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Updated the network info_cache for instance {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10298}} [ 846.875777] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.875936] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61998) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.876152] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.876342] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.876487] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.876632] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.876762] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61998) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10846}} [ 846.876905] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.889993] env[61998]: DEBUG nova.network.neutron [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 846.908157] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 847.044986] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388528, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509518} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.045302] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] b3a3bb81-843b-4227-bebf-a8079f98c0f8/b3a3bb81-843b-4227-bebf-a8079f98c0f8.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 847.045523] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.045864] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4404810f-2dc1-40f0-978c-f88e35659a4e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.056653] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 847.056653] env[61998]: value = "task-1388529" [ 847.056653] env[61998]: _type = "Task" [ 847.056653] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.066034] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388529, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.198208] env[61998]: DEBUG nova.network.neutron [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Updating instance_info_cache with network_info: [{"id": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "address": "fa:16:3e:15:ac:f5", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9f140c3-24", "ovs_interfaceid": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.227219] env[61998]: DEBUG nova.compute.manager [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Received event network-vif-plugged-e9f140c3-2474-433a-acc9-85eb29ac21cc {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 847.227433] env[61998]: DEBUG oslo_concurrency.lockutils [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] Acquiring lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.227688] env[61998]: DEBUG oslo_concurrency.lockutils [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] Lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.227812] env[61998]: DEBUG oslo_concurrency.lockutils [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] Lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.227973] env[61998]: DEBUG nova.compute.manager [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] No waiting events found dispatching network-vif-plugged-e9f140c3-2474-433a-acc9-85eb29ac21cc 
{{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 847.229314] env[61998]: WARNING nova.compute.manager [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Received unexpected event network-vif-plugged-e9f140c3-2474-433a-acc9-85eb29ac21cc for instance with vm_state building and task_state spawning. [ 847.229508] env[61998]: DEBUG nova.compute.manager [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Received event network-changed-e9f140c3-2474-433a-acc9-85eb29ac21cc {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 847.229827] env[61998]: DEBUG nova.compute.manager [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Refreshing instance network info cache due to event network-changed-e9f140c3-2474-433a-acc9-85eb29ac21cc. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 847.230675] env[61998]: DEBUG oslo_concurrency.lockutils [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] Acquiring lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.234226] env[61998]: DEBUG nova.scheduler.client.report [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 847.276291] env[61998]: DEBUG nova.compute.manager [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 847.380566] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.566403] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388529, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070661} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.566689] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.569059] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01c7ea7-32a2-4038-90f6-c43d7f19b6ce {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.589671] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] b3a3bb81-843b-4227-bebf-a8079f98c0f8/b3a3bb81-843b-4227-bebf-a8079f98c0f8.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.590347] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e4d46ba-718e-4a74-b7f0-9a077ef40b91 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.611610] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 847.611610] env[61998]: value = "task-1388530" [ 847.611610] env[61998]: _type = "Task" [ 847.611610] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.619771] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388530, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.703580] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.703981] env[61998]: DEBUG nova.compute.manager [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Instance network_info: |[{"id": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "address": "fa:16:3e:15:ac:f5", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9f140c3-24", "ovs_interfaceid": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 847.704685] env[61998]: DEBUG oslo_concurrency.lockutils [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] Acquired lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.704920] env[61998]: DEBUG nova.network.neutron [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Refreshing network info cache for port e9f140c3-2474-433a-acc9-85eb29ac21cc {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.706449] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:ac:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92f3cfd6-c130-4390-8910-865fbc42afd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e9f140c3-2474-433a-acc9-85eb29ac21cc', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.714628] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 
tempest-AttachInterfacesTestJSON-2125592630-project-member] Creating folder: Project (1e479b6ac56f464fbc86574f776cd96c). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.715425] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6fe56fbe-43d3-48c6-b770-3e572d98deb2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.729427] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Created folder: Project (1e479b6ac56f464fbc86574f776cd96c) in parent group-v294665. [ 847.729635] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Creating folder: Instances. Parent ref: group-v294710. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.729896] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13c3d321-4355-499e-86d9-5f78e5342a14 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.738640] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Created folder: Instances in parent group-v294710. [ 847.738876] env[61998]: DEBUG oslo.service.loopingcall [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 847.739074] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 847.739778] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.742772] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b2263ea-a631-4687-a820-a6c449c5ca18 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.758663] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.055s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.760175] env[61998]: INFO nova.compute.claims [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.768243] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 847.768243] env[61998]: value = "task-1388533" [ 847.768243] env[61998]: _type = "Task" [ 847.768243] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.771626] env[61998]: INFO nova.scheduler.client.report [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Deleted allocations for instance a7225abb-d8ea-49fc-85da-7791d9dde5bc [ 847.778874] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388533, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.798181] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.921822] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 847.958277] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 847.958570] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 847.958728] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 847.959082] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 847.959305] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 847.959849] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 847.960132] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 847.960337] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 847.960471] env[61998]: DEBUG nova.virt.hardware [None 
req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 847.960655] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 847.960832] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 847.961822] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a166f4d-e6bf-4c9c-95c6-a46981ea2158 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.975967] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8815b7f7-5e82-4774-80ab-d5167edc781b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.110183] env[61998]: DEBUG nova.compute.manager [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 848.111122] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a9d100-a47d-43d3-9464-be5e63e2d826 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.124734] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388530, 'name': ReconfigVM_Task, 'duration_secs': 0.482345} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.126447] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Reconfigured VM instance instance-00000041 to attach disk [datastore1] b3a3bb81-843b-4227-bebf-a8079f98c0f8/b3a3bb81-843b-4227-bebf-a8079f98c0f8.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.129216] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e8fdda1-fd0a-43dc-b6f4-fa21b0bd1b29 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.135979] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 848.135979] env[61998]: value = "task-1388534" [ 848.135979] env[61998]: _type = "Task" [ 848.135979] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.145707] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388534, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.288295] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388533, 'name': CreateVM_Task, 'duration_secs': 0.392825} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.289839] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4c939fa7-346e-44cd-b316-cb883100e537 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "a7225abb-d8ea-49fc-85da-7791d9dde5bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.720s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.291917] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 848.294910] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.296997] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.296997] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 848.296997] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50d46712-0d7e-47e4-845c-c24ccd047534 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.304437] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 848.304437] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]522501eb-b5d3-8716-5fba-a58543dbe857" [ 848.304437] env[61998]: _type = "Task" [ 848.304437] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.319937] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]522501eb-b5d3-8716-5fba-a58543dbe857, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.509934] env[61998]: DEBUG nova.compute.manager [req-02a8a20e-6e19-4469-b02c-785a0d9d1e8b req-af4239d9-e2af-4bc8-96d4-42110f8abc17 service nova] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Received event network-vif-plugged-a6cc2f5c-f8f0-4800-9361-1ecc4455015b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 848.510210] env[61998]: DEBUG oslo_concurrency.lockutils [req-02a8a20e-6e19-4469-b02c-785a0d9d1e8b req-af4239d9-e2af-4bc8-96d4-42110f8abc17 service nova] Acquiring lock "d780cbdc-8838-42bf-8736-bc2dd60e659c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.510605] env[61998]: DEBUG oslo_concurrency.lockutils [req-02a8a20e-6e19-4469-b02c-785a0d9d1e8b req-af4239d9-e2af-4bc8-96d4-42110f8abc17 service nova] Lock "d780cbdc-8838-42bf-8736-bc2dd60e659c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.510718] env[61998]: DEBUG oslo_concurrency.lockutils [req-02a8a20e-6e19-4469-b02c-785a0d9d1e8b req-af4239d9-e2af-4bc8-96d4-42110f8abc17 service nova] Lock "d780cbdc-8838-42bf-8736-bc2dd60e659c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.510885] env[61998]: DEBUG nova.compute.manager [req-02a8a20e-6e19-4469-b02c-785a0d9d1e8b req-af4239d9-e2af-4bc8-96d4-42110f8abc17 service nova] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] No waiting events found dispatching network-vif-plugged-a6cc2f5c-f8f0-4800-9361-1ecc4455015b {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 848.511450] env[61998]: WARNING nova.compute.manager [req-02a8a20e-6e19-4469-b02c-785a0d9d1e8b req-af4239d9-e2af-4bc8-96d4-42110f8abc17 service nova] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Received unexpected event network-vif-plugged-a6cc2f5c-f8f0-4800-9361-1ecc4455015b for instance with vm_state building and task_state spawning. [ 848.613967] env[61998]: DEBUG nova.network.neutron [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Updated VIF entry in instance network info cache for port e9f140c3-2474-433a-acc9-85eb29ac21cc. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 848.614869] env[61998]: DEBUG nova.network.neutron [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Updating instance_info_cache with network_info: [{"id": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "address": "fa:16:3e:15:ac:f5", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9f140c3-24", "ovs_interfaceid": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.633924] env[61998]: INFO nova.compute.manager [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] instance snapshotting [ 848.636877] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eecc2e81-3b19-4b8f-80fa-dc47ede06d54 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.667094] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388534, 'name': Rename_Task} progress is 99%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.668364] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e598199-bf1b-4cb3-a787-1659fb601047 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.796237] env[61998]: DEBUG oslo_concurrency.lockutils [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "5eb786f1-7789-48a0-a04e-a4039e387f58" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.796484] env[61998]: DEBUG oslo_concurrency.lockutils [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "5eb786f1-7789-48a0-a04e-a4039e387f58" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.796683] env[61998]: DEBUG oslo_concurrency.lockutils [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "5eb786f1-7789-48a0-a04e-a4039e387f58-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.796863] env[61998]: DEBUG oslo_concurrency.lockutils [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "5eb786f1-7789-48a0-a04e-a4039e387f58-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.797107] env[61998]: DEBUG oslo_concurrency.lockutils [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "5eb786f1-7789-48a0-a04e-a4039e387f58-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.799192] env[61998]: INFO nova.compute.manager [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Terminating instance [ 848.805460] env[61998]: DEBUG oslo_concurrency.lockutils [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "refresh_cache-5eb786f1-7789-48a0-a04e-a4039e387f58" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.805460] env[61998]: DEBUG oslo_concurrency.lockutils [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquired lock "refresh_cache-5eb786f1-7789-48a0-a04e-a4039e387f58" {{(pid=61998) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.805460] env[61998]: DEBUG nova.network.neutron [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 848.817434] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]522501eb-b5d3-8716-5fba-a58543dbe857, 'name': SearchDatastore_Task, 'duration_secs': 0.012152} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.818260] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.818478] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.818701] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.818841] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.819018] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 848.822545] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35b4dcb3-9f18-4c38-a0da-b70ab9e902fd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.851266] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 848.851452] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 848.852281] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03d63126-9a55-49b5-947e-f64be93f6eb4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.858398] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 848.858398] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52c0e3bb-1c38-58f0-dfea-be487a25d570" [ 848.858398] env[61998]: _type = "Task" [ 848.858398] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.869730] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c0e3bb-1c38-58f0-dfea-be487a25d570, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.020770] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79e7294-f56b-49ae-ba78-4f85fd3b717e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.028599] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44b1963-7bdc-458c-99de-80eecda67d10 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.060154] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1ea02c-a339-492a-abe2-aad4f6a597cf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.068438] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a706d426-ba84-4e18-b132-bc97cc60ed12 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.085527] env[61998]: DEBUG nova.compute.provider_tree [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.117491] env[61998]: DEBUG oslo_concurrency.lockutils [req-7a6600f1-bfab-4897-964f-f2e67c09fc3c req-da069ffe-5962-488a-b3d5-f67498a9ba22 service nova] Releasing lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.130319] env[61998]: DEBUG nova.network.neutron [None 
req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Successfully updated port: a6cc2f5c-f8f0-4800-9361-1ecc4455015b {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 849.149569] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388534, 'name': Rename_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.179524] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Creating Snapshot of the VM instance {{(pid=61998) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 849.180706] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7c43ce29-aae4-46ab-b313-eaf280a1b2e3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.187186] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){ [ 849.187186] env[61998]: value = "task-1388535" [ 849.187186] env[61998]: _type = "Task" [ 849.187186] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.194974] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388535, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.255728] env[61998]: DEBUG nova.compute.manager [req-79d6f226-49f1-48fb-ab94-c6e22983bee0 req-c39a8457-d74d-4113-9875-ff800911ce68 service nova] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Received event network-changed-a6cc2f5c-f8f0-4800-9361-1ecc4455015b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 849.255945] env[61998]: DEBUG nova.compute.manager [req-79d6f226-49f1-48fb-ab94-c6e22983bee0 req-c39a8457-d74d-4113-9875-ff800911ce68 service nova] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Refreshing instance network info cache due to event network-changed-a6cc2f5c-f8f0-4800-9361-1ecc4455015b. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 849.256516] env[61998]: DEBUG oslo_concurrency.lockutils [req-79d6f226-49f1-48fb-ab94-c6e22983bee0 req-c39a8457-d74d-4113-9875-ff800911ce68 service nova] Acquiring lock "refresh_cache-d780cbdc-8838-42bf-8736-bc2dd60e659c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.256516] env[61998]: DEBUG oslo_concurrency.lockutils [req-79d6f226-49f1-48fb-ab94-c6e22983bee0 req-c39a8457-d74d-4113-9875-ff800911ce68 service nova] Acquired lock "refresh_cache-d780cbdc-8838-42bf-8736-bc2dd60e659c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.256516] env[61998]: DEBUG nova.network.neutron [req-79d6f226-49f1-48fb-ab94-c6e22983bee0 req-c39a8457-d74d-4113-9875-ff800911ce68 service nova] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Refreshing network info cache for port a6cc2f5c-f8f0-4800-9361-1ecc4455015b {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.332575] env[61998]: DEBUG nova.network.neutron [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 849.371767] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c0e3bb-1c38-58f0-dfea-be487a25d570, 'name': SearchDatastore_Task, 'duration_secs': 0.046846} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.372610] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f21bef7d-86f5-4891-8bfc-237d2b4698b7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.378034] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 849.378034] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]524fb1a8-791f-121e-8e75-67d446eed391" [ 849.378034] env[61998]: _type = "Task" [ 849.378034] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.385509] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]524fb1a8-791f-121e-8e75-67d446eed391, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.386346] env[61998]: DEBUG nova.network.neutron [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.588874] env[61998]: DEBUG nova.scheduler.client.report [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 849.633753] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "refresh_cache-d780cbdc-8838-42bf-8736-bc2dd60e659c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.650916] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388534, 'name': Rename_Task, 'duration_secs': 1.320724} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.651322] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.651611] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c65305b-bf9d-47dc-bfe8-48f045523bb7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.658635] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 849.658635] env[61998]: value = "task-1388536" [ 849.658635] env[61998]: _type = "Task" [ 849.658635] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.668603] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388536, 'name': PowerOnVM_Task} progress is 0%. 
[ 849.696027] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388535, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 849.791891] env[61998]: DEBUG nova.network.neutron [req-79d6f226-49f1-48fb-ab94-c6e22983bee0 req-c39a8457-d74d-4113-9875-ff800911ce68 service nova] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 849.884533] env[61998]: DEBUG nova.network.neutron [req-79d6f226-49f1-48fb-ab94-c6e22983bee0 req-c39a8457-d74d-4113-9875-ff800911ce68 service nova] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 849.890547] env[61998]: DEBUG oslo_concurrency.lockutils [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Releasing lock "refresh_cache-5eb786f1-7789-48a0-a04e-a4039e387f58" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 849.891168] env[61998]: DEBUG nova.compute.manager [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}}
[ 849.891532] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 849.891910] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]524fb1a8-791f-121e-8e75-67d446eed391, 'name': SearchDatastore_Task, 'duration_secs': 0.017887} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 849.892912] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10a4ecb-68c7-4e4b-8c71-ec8f69107332 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 849.896073] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 849.896446] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] c51f684b-84f0-42b3-acf9-9e8317b10cb6/c51f684b-84f0-42b3-acf9-9e8317b10cb6.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 849.896602] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-defc9140-e8ec-4997-843f-10e8b4c9e765 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 849.904515] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 849.905853] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84adf0e1-04c8-4bcc-a3f1-c495ce13aacf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 849.908152] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){
[ 849.908152] env[61998]: value = "task-1388537"
[ 849.908152] env[61998]: _type = "Task"
[ 849.908152] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 849.913272] env[61998]: DEBUG oslo_vmware.api [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 849.913272] env[61998]: value = "task-1388538"
[ 849.913272] env[61998]: _type = "Task"
[ 849.913272] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 849.920843] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388537, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 849.928264] env[61998]: DEBUG oslo_vmware.api [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388538, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 850.095892] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 850.096643] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}}
[ 850.100166] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.437s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 850.100798] env[61998]: INFO nova.compute.claims [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 850.173766] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388536, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 850.199684] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388535, 'name': CreateSnapshot_Task, 'duration_secs': 0.900776} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
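The `Acquiring lock ... / Lock ... acquired ... :: waited / "released" ... :: held` trio that recurs above (here around `compute_resources`) is emitted by oslo.concurrency's lock wrapper each time a guarded section runs. A minimal sketch of the two usual entry points, with illustrative lock names taken from the log:

```python
from oslo_concurrency import lockutils

# Decorator form: serializes every call to the wrapped function on the
# named process-local lock, producing the acquired/released log lines.
@lockutils.synchronized('compute_resources')
def instance_claim():
    ...  # claim host resources for one instance at a time

# Context-manager form, as used for the per-instance cache locks above.
with lockutils.lock('refresh_cache-d780cbdc-8838-42bf-8736-bc2dd60e659c'):
    ...  # refresh this instance's network info cache
```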
[ 850.199938] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Created Snapshot of the VM instance {{(pid=61998) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}}
[ 850.200818] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4612a35-d2be-4eb0-a318-112d48b2d814 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.323628] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquiring lock "b9c5feec-7bfd-470e-9833-b45403195e83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 850.324251] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Lock "b9c5feec-7bfd-470e-9833-b45403195e83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 850.395525] env[61998]: DEBUG oslo_concurrency.lockutils [req-79d6f226-49f1-48fb-ab94-c6e22983bee0 req-c39a8457-d74d-4113-9875-ff800911ce68 service nova] Releasing lock "refresh_cache-d780cbdc-8838-42bf-8736-bc2dd60e659c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 850.395525] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquired lock "refresh_cache-d780cbdc-8838-42bf-8736-bc2dd60e659c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 850.395525] env[61998]: DEBUG nova.network.neutron [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 850.427159] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388537, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 850.430622] env[61998]: DEBUG oslo_vmware.api [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388538, 'name': PowerOffVM_Task, 'duration_secs': 0.207318} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 850.431160] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 850.433365] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 850.433365] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4fef8ac9-397e-46ca-b407-fcf788d315d0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.457853] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 850.458084] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 850.458282] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Deleting the datastore file [datastore1] 5eb786f1-7789-48a0-a04e-a4039e387f58 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 850.458608] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8dee9708-a534-4b6e-bd6b-97fd5f57ce07 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.465814] env[61998]: DEBUG oslo_vmware.api [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for the task: (returnval){
[ 850.465814] env[61998]: value = "task-1388540"
[ 850.465814] env[61998]: _type = "Task"
[ 850.465814] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 850.473590] env[61998]: DEBUG oslo_vmware.api [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388540, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
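The `Waiting for the task: (returnval){ ... }` blocks and the `progress is N%` lines above are oslo.vmware's task-polling loop: the vCenter call returns a `Task` moref immediately, and the session then re-reads `task.info` on a fixed interval until it reaches `success` or raises on `error`. A rough sketch of that pattern under assumed placeholder credentials and a datacenter reference obtained elsewhere (not runnable without a reachable vCenter; the exact constructor defaults may differ by oslo.vmware version):

```python
from oslo_vmware import api as vmware_api

# Placeholder connection values -- constructing the session logs in to vCenter.
session = vmware_api.VMwareAPISession(
    'vc1.example.test', 'administrator', 'secret',
    10,    # api_retry_count
    0.5)   # task_poll_interval, seconds between the "progress is N%" polls

fm = session.vim.service_content.fileManager
dc_ref = ...  # Datacenter moref, looked up elsewhere (placeholder)

# Kick off the async operation; vCenter hands back a Task moref at once,
# mirroring the FileManager.DeleteDatastoreFile_Task record above.
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', fm,
                          name='[datastore1] 5eb786f1-7789-48a0-a04e-a4039e387f58',
                          datacenter=dc_ref)
task_info = session.wait_for_task(task)  # blocks, polling until success/error
```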
[ 850.608658] env[61998]: DEBUG nova.compute.utils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 850.610640] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 850.610831] env[61998]: DEBUG nova.network.neutron [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 850.662041] env[61998]: DEBUG nova.policy [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5f2c10ceb02461095c74c83d4a745d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e4e8f3e000f4c2383b3a39f24499577', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}}
[ 850.673705] env[61998]: DEBUG oslo_vmware.api [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388536, 'name': PowerOnVM_Task, 'duration_secs': 0.778322} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
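The `Policy check for network:attach_external_network failed` record above is nova.policy consulting an oslo.policy enforcer with the caller's credential dict; a member-only token fails an admin-only rule, so the port is created without external-network privileges. A minimal sketch of that check, assuming an illustrative `role:admin` rule string (the real default lives in Nova's policy definitions):

```python
from oslo_config import cfg
from oslo_policy import policy

cfg.CONF([], project='demo')          # initialize config with no files/args
enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

# Trimmed credentials mirroring the log record above.
creds = {'roles': ['reader', 'member'],
         'project_id': '5e4e8f3e000f4c2383b3a39f24499577'}

allowed = enforcer.enforce('network:attach_external_network', {}, creds)
print(allowed)  # False -- the check fails for a member token, as logged
```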
[ 850.673705] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 850.674455] env[61998]: DEBUG nova.compute.manager [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}}
[ 850.674689] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8190ddd-3036-48cf-95c5-311a7b7ed176 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.723253] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Creating linked-clone VM from snapshot {{(pid=61998) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}}
[ 850.723924] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0457dd3f-dfdc-42b2-a2c6-a011ae2e18c5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.732653] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){
[ 850.732653] env[61998]: value = "task-1388541"
[ 850.732653] env[61998]: _type = "Task"
[ 850.732653] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 850.740819] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388541, 'name': CloneVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 850.920212] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388537, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540651} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 850.921693] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] c51f684b-84f0-42b3-acf9-9e8317b10cb6/c51f684b-84f0-42b3-acf9-9e8317b10cb6.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 850.922705] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 850.922705] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af6974b8-6b12-4a04-87a1-4003cbcf3011 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.928803] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){
[ 850.928803] env[61998]: value = "task-1388542"
[ 850.928803] env[61998]: _type = "Task"
[ 850.928803] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 850.940459] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388542, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 850.962438] env[61998]: DEBUG nova.network.neutron [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 850.976880] env[61998]: DEBUG oslo_vmware.api [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Task: {'id': task-1388540, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.400907} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 850.977299] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 850.977557] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 850.977794] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 850.978069] env[61998]: INFO nova.compute.manager [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Took 1.09 seconds to destroy the instance on the hypervisor.
[ 850.978405] env[61998]: DEBUG oslo.service.loopingcall [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 850.978682] env[61998]: DEBUG nova.compute.manager [-] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 850.978863] env[61998]: DEBUG nova.network.neutron [-] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 850.996163] env[61998]: DEBUG nova.network.neutron [-] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 851.113350] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}}
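The `Waiting for function ... _deallocate_network_with_retries to return.` record above is oslo.service's looping-call wrapper: Nova runs the retryable step inside a `FixedIntervalLoopingCall`, and the wrapped function raises `LoopingCallDone` to stop the loop and hand a result back to the waiter. A small self-contained sketch of the same pattern (the retry condition here is invented for illustration):

```python
from oslo_service import loopingcall

attempts = {'n': 0}

def _deallocate_with_retries():
    attempts['n'] += 1
    if attempts['n'] < 3:   # pretend the first two calls hit a transient error
        return              # returning normally lets the loop fire again
    raise loopingcall.LoopingCallDone(retvalue='deallocated')

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone
print(result)  # 'deallocated'
```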
[ 851.124133] env[61998]: DEBUG nova.network.neutron [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Successfully created port: 994d79bc-7e50-47c8-9a8c-1f381d9d3fe8 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 851.193621] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 851.194598] env[61998]: DEBUG nova.network.neutron [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Updating instance_info_cache with network_info: [{"id": "a6cc2f5c-f8f0-4800-9361-1ecc4455015b", "address": "fa:16:3e:1f:f2:34", "network": {"id": "ccb4f3a5-1f5d-46ca-a47c-8ae0d3302072", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-855620553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e4e8f3e000f4c2383b3a39f24499577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6cc2f5c-f8", "ovs_interfaceid": "a6cc2f5c-f8f0-4800-9361-1ecc4455015b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 851.246032] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388541, 'name': CloneVM_Task} progress is 94%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
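The `instance_info_cache` entry logged above is Nova's serialized network model: one dict per VIF, with the network and subnet detail nested inside. A trimmed view of that same entry as plain data (fields copied from the log record; this is illustration, not an API call):

```python
vif = {
    "id": "a6cc2f5c-f8f0-4800-9361-1ecc4455015b",
    "address": "fa:16:3e:1f:f2:34",
    "type": "ovs",
    "devname": "tapa6cc2f5c-f8",
    "network": {
        "id": "ccb4f3a5-1f5d-46ca-a47c-8ae0d3302072",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "version": 4},
            "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4}],
        }],
    },
    "details": {"connectivity": "l2", "port_filter": True},
    "active": True,
    "vnic_type": "normal",
}
# The cache is just a list of such dicts keyed by instance UUID.
print(vif["network"]["subnets"][0]["ips"][0]["address"])  # 192.168.128.7
```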
[ 851.397468] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783fe6aa-047c-4a47-81b1-da8cbdd594da {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 851.407172] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a166b5a-7ca2-4106-b74e-e6076d9980db {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 851.442374] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b523ad9-4fd1-4fc7-846a-08edcd4c82c7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 851.451660] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388542, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075345} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 851.451660] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 851.452406] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db0248c-5ebe-4a37-8b1d-61d3900d11f3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 851.455714] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d4ee77-4a8b-408c-af99-ac95d71ebc65 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 851.482047] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] c51f684b-84f0-42b3-acf9-9e8317b10cb6/c51f684b-84f0-42b3-acf9-9e8317b10cb6.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 851.491092] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eaa79566-e64d-47ad-88b3-264907040d04 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 851.505268] env[61998]: DEBUG nova.compute.provider_tree [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 851.506729] env[61998]: DEBUG nova.network.neutron [-] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 851.515946] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){
[ 851.515946] env[61998]: value = "task-1388543"
[ 851.515946] env[61998]: _type = "Task"
[ 851.515946] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 851.524105] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388543, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 851.697185] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Releasing lock "refresh_cache-d780cbdc-8838-42bf-8736-bc2dd60e659c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 851.697522] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Instance network_info: |[{"id": "a6cc2f5c-f8f0-4800-9361-1ecc4455015b", "address": "fa:16:3e:1f:f2:34", "network": {"id": "ccb4f3a5-1f5d-46ca-a47c-8ae0d3302072", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-855620553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e4e8f3e000f4c2383b3a39f24499577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6cc2f5c-f8", "ovs_interfaceid": "a6cc2f5c-f8f0-4800-9361-1ecc4455015b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}}
[ 851.697926] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:f2:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f87a752-ebb0-49a4-a67b-e356fa45b89b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6cc2f5c-f8f0-4800-9361-1ecc4455015b', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 851.705454] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Creating folder: Project (5e4e8f3e000f4c2383b3a39f24499577). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 851.705848] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b56eed6e-0528-486d-9fc2-4763f15408a5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 851.717032] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Created folder: Project (5e4e8f3e000f4c2383b3a39f24499577) in parent group-v294665.
[ 851.717032] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Creating folder: Instances. Parent ref: group-v294715. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 851.717032] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1581a839-174d-4160-9f1f-6a3ecba7c922 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 851.725419] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Created folder: Instances in parent group-v294715.
[ 851.725701] env[61998]: DEBUG oslo.service.loopingcall [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 851.725930] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 851.726490] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0938185e-7ecc-451a-a0d4-9185d6ccc73c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 851.748788] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388541, 'name': CloneVM_Task} progress is 94%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 851.750009] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 851.750009] env[61998]: value = "task-1388546"
[ 851.750009] env[61998]: _type = "Task"
[ 851.750009] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 851.756945] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388546, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 852.009847] env[61998]: DEBUG nova.scheduler.client.report [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 852.012515] env[61998]: INFO nova.compute.manager [-] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Took 1.03 seconds to deallocate network for instance.
[ 852.028650] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 852.128199] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}}
[ 852.149504] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 852.149757] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 852.149888] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 852.150081] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 852.150228] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 852.150371] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 852.150571] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 852.150725] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 852.150887] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 852.151056] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 852.151270] env[61998]: DEBUG nova.virt.hardware [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 852.152145] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cbbbfc-f261-4ad6-b682-a0ec79e24b84 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 852.160017] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c11284-73c3-4435-b693-50aec5fe1ee4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 852.250963] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388541, 'name': CloneVM_Task} progress is 94%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
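The `Build topologies for 1 vcpu(s) 1:1:1` ... `Got 1 possible topologies` walk above enumerates every (sockets, cores, threads) split of the vCPU count that fits within the flavor/image maxima (65536 effectively meaning "unconstrained"). A simplified sketch of that enumeration; Nova's real version also honors preferred orderings and filters differently:

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product is vcpus."""
    topos = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topos.append((s, c, t))
    return topos

print(possible_topologies(1))  # [(1, 1, 1)] -- "Got 1 possible topologies"
print(possible_topologies(4))  # several splits once vcpus > 1
```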
[ 852.261072] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388546, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 852.498625] env[61998]: DEBUG oslo_concurrency.lockutils [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "b3a3bb81-843b-4227-bebf-a8079f98c0f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 852.498904] env[61998]: DEBUG oslo_concurrency.lockutils [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "b3a3bb81-843b-4227-bebf-a8079f98c0f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 852.499127] env[61998]: DEBUG oslo_concurrency.lockutils [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "b3a3bb81-843b-4227-bebf-a8079f98c0f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 852.499307] env[61998]: DEBUG oslo_concurrency.lockutils [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "b3a3bb81-843b-4227-bebf-a8079f98c0f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 852.499472] env[61998]: DEBUG oslo_concurrency.lockutils [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "b3a3bb81-843b-4227-bebf-a8079f98c0f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 852.503019] env[61998]: INFO nova.compute.manager [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Terminating instance
[ 852.504684] env[61998]: DEBUG nova.compute.manager [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}}
[ 852.504819] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 852.505664] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c778239-7624-4235-a067-a9a96b943673 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 852.513074] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 852.513323] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68aaad0c-d270-40ea-9db1-ba096d1ee615 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 852.515409] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 852.515751] env[61998]: DEBUG nova.compute.manager [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}}
[ 852.518206] env[61998]: DEBUG oslo_concurrency.lockutils [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.156s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 852.518414] env[61998]: DEBUG nova.objects.instance [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Lazy-loading 'resources' on Instance uuid c55717f0-8ef2-4e55-b1cf-60f6faea9e5e {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 852.520833] env[61998]: DEBUG oslo_concurrency.lockutils [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 852.525413] env[61998]: DEBUG oslo_vmware.api [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){
[ 852.525413] env[61998]: value = "task-1388547"
[ 852.525413] env[61998]: _type = "Task"
[ 852.525413] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 852.532146] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388543, 'name': ReconfigVM_Task, 'duration_secs': 0.591245} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 852.532761] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Reconfigured VM instance instance-00000045 to attach disk [datastore1] c51f684b-84f0-42b3-acf9-9e8317b10cb6/c51f684b-84f0-42b3-acf9-9e8317b10cb6.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 852.533641] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b11c281f-d9e1-4aed-b5fc-9ca8d4fc7c65 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 852.538554] env[61998]: DEBUG oslo_vmware.api [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388547, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 852.544554] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){
[ 852.544554] env[61998]: value = "task-1388548"
[ 852.544554] env[61998]: _type = "Task"
[ 852.544554] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 852.553236] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388548, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 852.662305] env[61998]: DEBUG nova.compute.manager [req-0eb54d83-577b-462c-ba11-6758dfdfa8ec req-fa98635c-9d92-46fc-8d07-c67abca8bddb service nova] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Received event network-vif-plugged-994d79bc-7e50-47c8-9a8c-1f381d9d3fe8 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 852.662600] env[61998]: DEBUG oslo_concurrency.lockutils [req-0eb54d83-577b-462c-ba11-6758dfdfa8ec req-fa98635c-9d92-46fc-8d07-c67abca8bddb service nova] Acquiring lock "f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 852.662856] env[61998]: DEBUG oslo_concurrency.lockutils [req-0eb54d83-577b-462c-ba11-6758dfdfa8ec req-fa98635c-9d92-46fc-8d07-c67abca8bddb service nova] Lock "f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 852.663335] env[61998]: DEBUG oslo_concurrency.lockutils [req-0eb54d83-577b-462c-ba11-6758dfdfa8ec req-fa98635c-9d92-46fc-8d07-c67abca8bddb service nova] Lock "f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 852.663634] env[61998]: DEBUG nova.compute.manager [req-0eb54d83-577b-462c-ba11-6758dfdfa8ec req-fa98635c-9d92-46fc-8d07-c67abca8bddb service nova] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] No waiting events found dispatching network-vif-plugged-994d79bc-7e50-47c8-9a8c-1f381d9d3fe8 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 852.664146] env[61998]: WARNING nova.compute.manager [req-0eb54d83-577b-462c-ba11-6758dfdfa8ec req-fa98635c-9d92-46fc-8d07-c67abca8bddb service nova] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Received unexpected event network-vif-plugged-994d79bc-7e50-47c8-9a8c-1f381d9d3fe8 for instance with vm_state building and task_state spawning.
[ 852.752026] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388541, 'name': CloneVM_Task, 'duration_secs': 1.934206} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.755419] env[61998]: INFO nova.virt.vmwareapi.vmops [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Created linked-clone VM from snapshot [ 852.757689] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f996a4d5-01e2-4027-8d5f-85ebc053d743 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.763665] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388546, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.767545] env[61998]: DEBUG nova.network.neutron [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Successfully updated port: 994d79bc-7e50-47c8-9a8c-1f381d9d3fe8 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 852.769845] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Uploading image 3be4b34d-486d-4eca-aa62-512911df94fe {{(pid=61998) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 852.780071] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Destroying the VM {{(pid=61998) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 852.780343] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e8317d98-29cb-4093-89b3-2f66880904c8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.786771] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){ [ 852.786771] env[61998]: value = "task-1388549" [ 852.786771] env[61998]: _type = "Task" [ 852.786771] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.794962] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388549, 'name': Destroy_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.986732] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "23265b26-7579-4514-a172-8cf2ec124ec6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.986859] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "23265b26-7579-4514-a172-8cf2ec124ec6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.022154] env[61998]: DEBUG nova.compute.utils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 853.023785] env[61998]: DEBUG nova.compute.manager [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 853.023960] env[61998]: DEBUG nova.network.neutron [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 853.040166] env[61998]: DEBUG oslo_vmware.api [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388547, 'name': PowerOffVM_Task, 'duration_secs': 0.181699} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.040430] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 853.040721] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 853.040928] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72491874-1eda-4ee7-bab2-f87f4cd87b64 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.056484] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388548, 'name': Rename_Task, 'duration_secs': 0.159757} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.056788] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 853.057105] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ce97df6-eee8-4505-ade8-3321ecff7db9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.064764] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 853.064764] env[61998]: value = "task-1388551" [ 853.064764] env[61998]: _type = "Task" [ 853.064764] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.075637] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388551, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.088933] env[61998]: DEBUG nova.policy [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '744da696f7c64f62ae04195aa737fab4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c75c9b7c8d6b441d80fe512c37c88679', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 853.260786] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388546, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.270212] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "refresh_cache-f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.270380] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquired lock "refresh_cache-f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.270657] env[61998]: DEBUG nova.network.neutron [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.275505] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5508394c-1eae-40b6-9193-c9b2644f51b9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.283724] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f657cb-8fea-4208-baf4-11849756b14a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.297014] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388549, 'name': Destroy_Task} progress is 33%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.322737] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd761c0-a22e-4482-afc2-4b7a74e08b8c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.330399] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe510e3-38e9-4653-92ee-645cf8287186 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.345452] env[61998]: DEBUG nova.compute.provider_tree [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.432297] env[61998]: DEBUG nova.network.neutron [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Successfully created port: e3958acf-f252-41b8-84ce-2b216db5b0ff {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 853.527469] env[61998]: DEBUG nova.compute.manager [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 853.579329] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388551, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.763012] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388546, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.799375] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388549, 'name': Destroy_Task} progress is 33%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.813093] env[61998]: DEBUG nova.network.neutron [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.848610] env[61998]: DEBUG nova.scheduler.client.report [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 853.938526] env[61998]: DEBUG nova.network.neutron [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Updating instance_info_cache with network_info: [{"id": "994d79bc-7e50-47c8-9a8c-1f381d9d3fe8", "address": "fa:16:3e:96:5b:37", "network": {"id": "ccb4f3a5-1f5d-46ca-a47c-8ae0d3302072", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-855620553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e4e8f3e000f4c2383b3a39f24499577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap994d79bc-7e", "ovs_interfaceid": "994d79bc-7e50-47c8-9a8c-1f381d9d3fe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.076701] env[61998]: DEBUG oslo_vmware.api [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388551, 'name': PowerOnVM_Task, 'duration_secs': 0.730934} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.076955] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 854.077155] env[61998]: INFO nova.compute.manager [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Took 9.49 seconds to spawn the instance on the hypervisor. 
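The wait_for_task/_poll_task records throughout this section all trace one pattern: a vCenter method is invoked and returns a Task managed-object reference (the "Waiting for the task: (returnval){ value = \"task-...\" _type = \"Task\" }" blocks), and oslo.vmware then polls that task's info, logging "progress is N%." on each poll until it reports "completed successfully" (wait_for_task at oslo_vmware/api.py:397, _poll_task at api.py:434/444). A minimal, self-contained sketch of that poll loop follows; it is an illustration only, not the oslo.vmware implementation, and get_task_info is a hypothetical callable standing in for the real vSphere Task property read.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it reaches a terminal state.

        get_task_info is a hypothetical callable returning a dict such as
        {'id': 'task-1388551', 'state': 'running', 'progress': 66}; the real
        loop in oslo_vmware/api.py reads the Task managed object instead.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info['state'] == 'success':
                # corresponds to the "... completed successfully." records
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError("task failed: %s" % info.get('error'))
            # corresponds to the "Task: {...} progress is N%." records
            print("Task %s progress is %s%%." % (info['id'], info.get('progress', 0)))
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete within %ss" % timeout)

In oslo.vmware the equivalent loop runs inside a looping call with a configurable task_poll_interval, which is why the same task (e.g. task-1388546, CreateVM_Task at 99%) reappears at regular intervals in the records above and below until its completion record is emitted.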
[ 854.077333] env[61998]: DEBUG nova.compute.manager [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 854.078082] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74644dcc-7b78-4935-98b7-f1828facdf20 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.262298] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388546, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.296920] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388549, 'name': Destroy_Task} progress is 33%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.353254] env[61998]: DEBUG oslo_concurrency.lockutils [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.835s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.355678] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.295s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.357151] env[61998]: INFO nova.compute.claims [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 854.382194] env[61998]: INFO nova.scheduler.client.report [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Deleted allocations for instance c55717f0-8ef2-4e55-b1cf-60f6faea9e5e [ 854.440822] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Releasing lock "refresh_cache-f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.441146] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Instance network_info: |[{"id": "994d79bc-7e50-47c8-9a8c-1f381d9d3fe8", "address": "fa:16:3e:96:5b:37", "network": {"id": "ccb4f3a5-1f5d-46ca-a47c-8ae0d3302072", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-855620553-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e4e8f3e000f4c2383b3a39f24499577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap994d79bc-7e", "ovs_interfaceid": "994d79bc-7e50-47c8-9a8c-1f381d9d3fe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 854.441541] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:5b:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f87a752-ebb0-49a4-a67b-e356fa45b89b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '994d79bc-7e50-47c8-9a8c-1f381d9d3fe8', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 854.449595] env[61998]: DEBUG oslo.service.loopingcall [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.450017] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 854.450248] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a3b07c4-4dfc-4c30-918e-2f2b609bcd6f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.469850] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 854.469850] env[61998]: value = "task-1388552" [ 854.469850] env[61998]: _type = "Task" [ 854.469850] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.476850] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388552, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.542200] env[61998]: DEBUG nova.compute.manager [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 854.568494] env[61998]: DEBUG nova.virt.hardware [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 854.568769] env[61998]: DEBUG nova.virt.hardware [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 854.568948] env[61998]: DEBUG nova.virt.hardware [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 854.569322] env[61998]: DEBUG nova.virt.hardware [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 854.569514] env[61998]: DEBUG nova.virt.hardware [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 854.569685] env[61998]: DEBUG nova.virt.hardware [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 854.569895] env[61998]: DEBUG nova.virt.hardware [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 854.570072] env[61998]: DEBUG nova.virt.hardware [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 854.570244] env[61998]: DEBUG nova.virt.hardware [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] 
Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 854.570401] env[61998]: DEBUG nova.virt.hardware [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 854.570567] env[61998]: DEBUG nova.virt.hardware [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 854.571429] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742d453a-c8de-4a4c-b488-04b14f05ea8d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.579714] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d684c2f-9edd-4588-9d58-87e422c7a802 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.600990] env[61998]: INFO nova.compute.manager [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Took 29.17 seconds to build instance. [ 854.686329] env[61998]: DEBUG nova.compute.manager [req-9ca6af78-b481-4d9b-ae4f-9e537d9bf237 req-95615745-2137-4e0c-b447-97f70e2bf5de service nova] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Received event network-changed-994d79bc-7e50-47c8-9a8c-1f381d9d3fe8 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 854.686628] env[61998]: DEBUG nova.compute.manager [req-9ca6af78-b481-4d9b-ae4f-9e537d9bf237 req-95615745-2137-4e0c-b447-97f70e2bf5de service nova] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Refreshing instance network info cache due to event network-changed-994d79bc-7e50-47c8-9a8c-1f381d9d3fe8. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 854.686825] env[61998]: DEBUG oslo_concurrency.lockutils [req-9ca6af78-b481-4d9b-ae4f-9e537d9bf237 req-95615745-2137-4e0c-b447-97f70e2bf5de service nova] Acquiring lock "refresh_cache-f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.686951] env[61998]: DEBUG oslo_concurrency.lockutils [req-9ca6af78-b481-4d9b-ae4f-9e537d9bf237 req-95615745-2137-4e0c-b447-97f70e2bf5de service nova] Acquired lock "refresh_cache-f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.687153] env[61998]: DEBUG nova.network.neutron [req-9ca6af78-b481-4d9b-ae4f-9e537d9bf237 req-95615745-2137-4e0c-b447-97f70e2bf5de service nova] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Refreshing network info cache for port 994d79bc-7e50-47c8-9a8c-1f381d9d3fe8 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.763735] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388546, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.799906] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388549, 'name': Destroy_Task} progress is 33%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.889530] env[61998]: DEBUG oslo_concurrency.lockutils [None req-632684e4-3211-4024-b4ff-37fe5a564f6b tempest-ServerPasswordTestJSON-114933406 tempest-ServerPasswordTestJSON-114933406-project-member] Lock "c55717f0-8ef2-4e55-b1cf-60f6faea9e5e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.490s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.979688] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388552, 'name': CreateVM_Task, 'duration_secs': 0.350111} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.979862] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 854.980593] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.980752] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.981085] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 854.981334] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82e607fc-2a3d-46cf-9e57-552608f5bd70 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.985661] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 854.985661] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52c5052a-11ad-271d-88fa-5b830acb3c01" [ 854.985661] env[61998]: _type = "Task" [ 854.985661] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.992932] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c5052a-11ad-271d-88fa-5b830acb3c01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.103306] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c4c910f9-5473-4d94-9a18-7b188842becf tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.128s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.238175] env[61998]: DEBUG nova.network.neutron [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Successfully updated port: e3958acf-f252-41b8-84ce-2b216db5b0ff {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 855.263936] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388546, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.303016] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388549, 'name': Destroy_Task} progress is 33%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.414396] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.414673] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.414903] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleting the datastore file [datastore1] b3a3bb81-843b-4227-bebf-a8079f98c0f8 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.415239] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5471249-4b6c-4a95-a917-94b20333d89f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.423519] env[61998]: DEBUG oslo_vmware.api [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 855.423519] env[61998]: value = "task-1388553" [ 855.423519] env[61998]: _type = "Task" [ 855.423519] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.439133] env[61998]: DEBUG oslo_vmware.api [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388553, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.497564] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c5052a-11ad-271d-88fa-5b830acb3c01, 'name': SearchDatastore_Task, 'duration_secs': 0.011703} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.497873] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.498118] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.498439] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.498753] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.498897] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.499317] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac305a79-530c-46de-a8f8-f39cc63c1b70 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.506394] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.506474] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.509496] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1c1ac7c-f7e1-4b23-9a7a-f028f17cfb32 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.514707] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 855.514707] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]523d7833-01a1-493c-3a9e-00b842b0028f" [ 855.514707] env[61998]: _type = "Task" [ 855.514707] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.528182] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523d7833-01a1-493c-3a9e-00b842b0028f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.605988] env[61998]: DEBUG nova.compute.manager [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 855.635052] env[61998]: DEBUG nova.network.neutron [req-9ca6af78-b481-4d9b-ae4f-9e537d9bf237 req-95615745-2137-4e0c-b447-97f70e2bf5de service nova] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Updated VIF entry in instance network info cache for port 994d79bc-7e50-47c8-9a8c-1f381d9d3fe8. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 855.635399] env[61998]: DEBUG nova.network.neutron [req-9ca6af78-b481-4d9b-ae4f-9e537d9bf237 req-95615745-2137-4e0c-b447-97f70e2bf5de service nova] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Updating instance_info_cache with network_info: [{"id": "994d79bc-7e50-47c8-9a8c-1f381d9d3fe8", "address": "fa:16:3e:96:5b:37", "network": {"id": "ccb4f3a5-1f5d-46ca-a47c-8ae0d3302072", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-855620553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e4e8f3e000f4c2383b3a39f24499577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f87a752-ebb0-49a4-a67b-e356fa45b89b", "external-id": "nsx-vlan-transportzone-889", "segmentation_id": 889, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap994d79bc-7e", "ovs_interfaceid": "994d79bc-7e50-47c8-9a8c-1f381d9d3fe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.662928] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925aefdd-5d43-4190-995e-0c70620d0ecd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.670913] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2068f054-d5af-44f6-9ccb-bfe0344fb8da {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.701051] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a7cdf6-39f9-4287-8082-c85283e96335 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.708980] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cadc7c-3402-4660-993e-0eeb2a43b8c7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.723420] env[61998]: DEBUG nova.compute.provider_tree [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.742052] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "refresh_cache-ac4a8463-91ba-4061-aa5d-1c72c4f532ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.742211] env[61998]: DEBUG oslo_concurrency.lockutils [None 
req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "refresh_cache-ac4a8463-91ba-4061-aa5d-1c72c4f532ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.742363] env[61998]: DEBUG nova.network.neutron [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 855.764335] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388546, 'name': CreateVM_Task, 'duration_secs': 3.657103} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.765660] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 855.768136] env[61998]: DEBUG nova.compute.manager [req-3ec1c995-baa3-4b10-9135-803eb37f50b9 req-77ff5edd-2800-4e41-9be5-be0a12cf120e service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Received event network-changed-e9f140c3-2474-433a-acc9-85eb29ac21cc {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 855.768136] env[61998]: DEBUG nova.compute.manager [req-3ec1c995-baa3-4b10-9135-803eb37f50b9 req-77ff5edd-2800-4e41-9be5-be0a12cf120e service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Refreshing instance network info cache due to event network-changed-e9f140c3-2474-433a-acc9-85eb29ac21cc. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 855.768136] env[61998]: DEBUG oslo_concurrency.lockutils [req-3ec1c995-baa3-4b10-9135-803eb37f50b9 req-77ff5edd-2800-4e41-9be5-be0a12cf120e service nova] Acquiring lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.768136] env[61998]: DEBUG oslo_concurrency.lockutils [req-3ec1c995-baa3-4b10-9135-803eb37f50b9 req-77ff5edd-2800-4e41-9be5-be0a12cf120e service nova] Acquired lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.768136] env[61998]: DEBUG nova.network.neutron [req-3ec1c995-baa3-4b10-9135-803eb37f50b9 req-77ff5edd-2800-4e41-9be5-be0a12cf120e service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Refreshing network info cache for port e9f140c3-2474-433a-acc9-85eb29ac21cc {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 855.772032] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.772032] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.772032] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 855.772032] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40fc433a-2a55-4472-8f4d-4514cc9eeb35 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.776742] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 855.776742] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]526ce82a-2463-1445-2af5-6514cc832800" [ 855.776742] env[61998]: _type = "Task" [ 855.776742] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.785610] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526ce82a-2463-1445-2af5-6514cc832800, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.800832] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388549, 'name': Destroy_Task, 'duration_secs': 2.736642} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.801143] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Destroyed the VM [ 855.801432] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Deleting Snapshot of the VM instance {{(pid=61998) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 855.801674] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fff47043-7f8a-41eb-aaab-d8dd8281743e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.810605] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){ [ 855.810605] env[61998]: value = "task-1388554" [ 855.810605] env[61998]: _type = "Task" [ 855.810605] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.816638] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388554, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.940049] env[61998]: DEBUG oslo_vmware.api [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190718} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.940336] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.940615] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 855.940849] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 855.941168] env[61998]: INFO nova.compute.manager [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Took 3.44 seconds to destroy the instance on the hypervisor. [ 855.941728] env[61998]: DEBUG oslo.service.loopingcall [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.941929] env[61998]: DEBUG nova.compute.manager [-] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 855.942086] env[61998]: DEBUG nova.network.neutron [-] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 856.029809] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523d7833-01a1-493c-3a9e-00b842b0028f, 'name': SearchDatastore_Task, 'duration_secs': 0.02761} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.030734] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31ef4c82-7fc5-4a46-a8c8-a45cb797c800 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.037619] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 856.037619] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5295893b-459e-b8cb-664e-6585c4dc4d36" [ 856.037619] env[61998]: _type = "Task" [ 856.037619] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.047841] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5295893b-459e-b8cb-664e-6585c4dc4d36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.134666] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.139566] env[61998]: DEBUG oslo_concurrency.lockutils [req-9ca6af78-b481-4d9b-ae4f-9e537d9bf237 req-95615745-2137-4e0c-b447-97f70e2bf5de service nova] Releasing lock "refresh_cache-f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.226749] env[61998]: DEBUG nova.scheduler.client.report [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 856.287532] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526ce82a-2463-1445-2af5-6514cc832800, 'name': SearchDatastore_Task, 'duration_secs': 0.010522} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.287750] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.287972] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 856.288191] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.321384] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388554, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.485906] env[61998]: DEBUG nova.network.neutron [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 856.549544] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5295893b-459e-b8cb-664e-6585c4dc4d36, 'name': SearchDatastore_Task, 'duration_secs': 0.016435} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.549868] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.550293] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83/f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 856.550641] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.551223] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 856.551223] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1d3ecac-626b-47a5-8b4f-68fed5a38d0b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.555832] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e19958b-54b3-497d-b042-87df9451c3a9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.561750] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 856.561750] env[61998]: value = "task-1388555" [ 856.561750] env[61998]: _type = "Task" [ 856.561750] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.565764] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 856.565930] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 856.569608] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cc74c0f-691f-47a4-a90c-4446e76e2850 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.580210] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388555, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.581483] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 856.581483] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]528f7b8a-3476-1134-cb86-ca5a3241e432" [ 856.581483] env[61998]: _type = "Task" [ 856.581483] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.592093] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528f7b8a-3476-1134-cb86-ca5a3241e432, 'name': SearchDatastore_Task, 'duration_secs': 0.013212} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.593115] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af8f32d9-bae9-49c6-b131-5ae6fc7f651e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.598747] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 856.598747] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52768fd1-40d1-8416-216e-10f96b838779" [ 856.598747] env[61998]: _type = "Task" [ 856.598747] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.608779] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52768fd1-40d1-8416-216e-10f96b838779, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.702473] env[61998]: DEBUG nova.network.neutron [-] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.735072] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.735634] env[61998]: DEBUG nova.compute.manager [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 856.738163] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.900s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.742918] env[61998]: INFO nova.compute.claims [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 856.791177] env[61998]: DEBUG nova.compute.manager [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Received event network-vif-plugged-e3958acf-f252-41b8-84ce-2b216db5b0ff {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 856.791419] env[61998]: DEBUG oslo_concurrency.lockutils [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] Acquiring lock "ac4a8463-91ba-4061-aa5d-1c72c4f532ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.791618] env[61998]: DEBUG oslo_concurrency.lockutils [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] Lock "ac4a8463-91ba-4061-aa5d-1c72c4f532ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.791784] env[61998]: DEBUG oslo_concurrency.lockutils [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] Lock "ac4a8463-91ba-4061-aa5d-1c72c4f532ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.791951] env[61998]: DEBUG nova.compute.manager [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] No waiting events found dispatching network-vif-plugged-e3958acf-f252-41b8-84ce-2b216db5b0ff {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 856.794569] env[61998]: WARNING nova.compute.manager [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Received unexpected event network-vif-plugged-e3958acf-f252-41b8-84ce-2b216db5b0ff for instance with vm_state building and task_state spawning. [ 856.794783] env[61998]: DEBUG nova.compute.manager [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Received event network-changed-e3958acf-f252-41b8-84ce-2b216db5b0ff {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 856.794948] env[61998]: DEBUG nova.compute.manager [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Refreshing instance network info cache due to event network-changed-e3958acf-f252-41b8-84ce-2b216db5b0ff. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 856.795277] env[61998]: DEBUG oslo_concurrency.lockutils [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] Acquiring lock "refresh_cache-ac4a8463-91ba-4061-aa5d-1c72c4f532ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.824463] env[61998]: DEBUG oslo_vmware.api [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388554, 'name': RemoveSnapshot_Task, 'duration_secs': 0.6844} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.824746] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Deleted Snapshot of the VM instance {{(pid=61998) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 856.832972] env[61998]: DEBUG nova.network.neutron [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Updating instance_info_cache with network_info: [{"id": "e3958acf-f252-41b8-84ce-2b216db5b0ff", "address": "fa:16:3e:2f:c9:ce", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3958acf-f2", "ovs_interfaceid": "e3958acf-f252-41b8-84ce-2b216db5b0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.937713] env[61998]: DEBUG nova.network.neutron [req-3ec1c995-baa3-4b10-9135-803eb37f50b9 req-77ff5edd-2800-4e41-9be5-be0a12cf120e service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Updated VIF entry in instance network info cache for port e9f140c3-2474-433a-acc9-85eb29ac21cc. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 856.938206] env[61998]: DEBUG nova.network.neutron [req-3ec1c995-baa3-4b10-9135-803eb37f50b9 req-77ff5edd-2800-4e41-9be5-be0a12cf120e service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Updating instance_info_cache with network_info: [{"id": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "address": "fa:16:3e:15:ac:f5", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9f140c3-24", "ovs_interfaceid": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.071819] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388555, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.111242] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52768fd1-40d1-8416-216e-10f96b838779, 'name': SearchDatastore_Task, 'duration_secs': 0.039755} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.111666] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.111988] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] d780cbdc-8838-42bf-8736-bc2dd60e659c/d780cbdc-8838-42bf-8736-bc2dd60e659c.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 857.112369] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eaccee26-adbc-4d06-a1e3-7f7396187a4d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.119282] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 857.119282] env[61998]: value = "task-1388556" [ 857.119282] env[61998]: _type = "Task" [ 857.119282] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.127885] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388556, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.205744] env[61998]: INFO nova.compute.manager [-] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Took 1.26 seconds to deallocate network for instance. [ 857.250478] env[61998]: DEBUG nova.compute.utils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 857.256541] env[61998]: DEBUG nova.compute.manager [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 857.256699] env[61998]: DEBUG nova.network.neutron [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 857.321204] env[61998]: DEBUG nova.policy [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c635524b8cc04211861fb7c5056c5175', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68f3a45cd0eb45e0b48b0f4efdcd51a3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 857.330529] env[61998]: WARNING nova.compute.manager [None req-bc793d66-0f5e-4608-8600-148b2ded89a7 tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Image not found during snapshot: nova.exception.ImageNotFound: Image 3be4b34d-486d-4eca-aa62-512911df94fe could not be found. [ 857.334565] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "refresh_cache-ac4a8463-91ba-4061-aa5d-1c72c4f532ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.334877] env[61998]: DEBUG nova.compute.manager [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Instance network_info: |[{"id": "e3958acf-f252-41b8-84ce-2b216db5b0ff", "address": "fa:16:3e:2f:c9:ce", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3958acf-f2", "ovs_interfaceid": "e3958acf-f252-41b8-84ce-2b216db5b0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 857.335244] env[61998]: DEBUG oslo_concurrency.lockutils 
[req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] Acquired lock "refresh_cache-ac4a8463-91ba-4061-aa5d-1c72c4f532ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.335432] env[61998]: DEBUG nova.network.neutron [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Refreshing network info cache for port e3958acf-f252-41b8-84ce-2b216db5b0ff {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 857.336735] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:c9:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3958acf-f252-41b8-84ce-2b216db5b0ff', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 857.345829] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Creating folder: Project (c75c9b7c8d6b441d80fe512c37c88679). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 857.346573] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59f43316-fc13-4a1b-b3e7-8d005c824db8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.359534] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Created folder: Project (c75c9b7c8d6b441d80fe512c37c88679) in parent group-v294665. [ 857.359757] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Creating folder: Instances. Parent ref: group-v294719. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 857.360030] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b29bc7f-500a-4626-9f71-6ac17cedc63a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.371277] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Created folder: Instances in parent group-v294719. [ 857.371584] env[61998]: DEBUG oslo.service.loopingcall [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 857.371835] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 857.372054] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b832ff0c-d536-451a-9d37-e4c05afbb5ff {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.392015] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 857.392015] env[61998]: value = "task-1388559" [ 857.392015] env[61998]: _type = "Task" [ 857.392015] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.403561] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388559, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.441469] env[61998]: DEBUG oslo_concurrency.lockutils [req-3ec1c995-baa3-4b10-9135-803eb37f50b9 req-77ff5edd-2800-4e41-9be5-be0a12cf120e service nova] Releasing lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.574247] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388555, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.936515} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.574247] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83/f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.574590] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.574773] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76e5422a-d451-444c-a4a0-1963eb0db81b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.583363] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 857.583363] env[61998]: value = "task-1388560" [ 857.583363] env[61998]: _type = "Task" [ 857.583363] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.589672] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388560, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.606457] env[61998]: DEBUG nova.network.neutron [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Successfully created port: 294bd0fe-945d-4198-8a3c-13e489ae1134 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 857.632707] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388556, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.714343] env[61998]: DEBUG oslo_concurrency.lockutils [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.760407] env[61998]: DEBUG nova.compute.manager [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 857.906503] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388559, 'name': CreateVM_Task, 'duration_secs': 0.396435} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.906503] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 857.908224] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.908224] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.908224] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 857.908224] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5484be44-f3ad-4790-b4b8-6e925d8853b9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.913430] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 857.913430] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52485786-8b9c-cf4c-3dd5-51ac75dd9186" [ 857.913430] env[61998]: _type = "Task" [ 857.913430] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.928263] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52485786-8b9c-cf4c-3dd5-51ac75dd9186, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.058734] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca3373a-2b51-4336-a428-6a47889d149e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.068627] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06d198e-97c1-4bc9-be37-d3269f212f55 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.107981] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff85957-c5a1-41a5-b26c-ecb52fa6d6bd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.122465] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85267a24-bcc9-40e3-9367-6d19c7219f7c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.125897] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388560, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067897} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.128917] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 858.130159] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d7b4d6-fd7a-45b6-b0bf-3d6328cb9445 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.144305] env[61998]: DEBUG nova.compute.provider_tree [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.146838] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388556, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.166277] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83/f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 858.168760] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abc084d2-d93a-4d35-be36-e5785e760995 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.192682] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 858.192682] env[61998]: value = "task-1388561" [ 858.192682] env[61998]: _type = "Task" [ 858.192682] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.209767] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388561, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.225062] env[61998]: DEBUG nova.network.neutron [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Updated VIF entry in instance network info cache for port e3958acf-f252-41b8-84ce-2b216db5b0ff. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 858.225487] env[61998]: DEBUG nova.network.neutron [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Updating instance_info_cache with network_info: [{"id": "e3958acf-f252-41b8-84ce-2b216db5b0ff", "address": "fa:16:3e:2f:c9:ce", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3958acf-f2", "ovs_interfaceid": "e3958acf-f252-41b8-84ce-2b216db5b0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.426584] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52485786-8b9c-cf4c-3dd5-51ac75dd9186, 'name': SearchDatastore_Task, 'duration_secs': 0.071235} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.426883] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.427133] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 858.427379] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.427518] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.427695] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 858.427951] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e82c89d8-d117-40bd-a305-b0a6e0e7f98e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.472709] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 858.472897] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 858.473646] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e2cbc15-c9cd-4a4e-916b-b1c426249dca {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.478857] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 858.478857] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52774111-57a6-a245-9ec8-1a11b8e0ad75" [ 858.478857] env[61998]: _type = "Task" [ 858.478857] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.486366] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52774111-57a6-a245-9ec8-1a11b8e0ad75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.635011] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388556, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.235149} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.635315] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] d780cbdc-8838-42bf-8736-bc2dd60e659c/d780cbdc-8838-42bf-8736-bc2dd60e659c.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 858.635533] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 858.635782] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26e93ae7-21e3-48f9-9e68-758c946352d1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.641791] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 858.641791] env[61998]: value = "task-1388562" [ 858.641791] env[61998]: _type = "Task" [ 858.641791] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.649658] env[61998]: DEBUG nova.scheduler.client.report [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 858.652694] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388562, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.702452] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388561, 'name': ReconfigVM_Task, 'duration_secs': 0.289437} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.702726] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Reconfigured VM instance instance-00000047 to attach disk [datastore1] f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83/f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.703483] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "c84d15dc-0ef2-44e2-b579-104678a6bb07" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.703703] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "c84d15dc-0ef2-44e2-b579-104678a6bb07" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.703892] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "c84d15dc-0ef2-44e2-b579-104678a6bb07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.704114] env[61998]: DEBUG oslo_concurrency.lockutils
[None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "c84d15dc-0ef2-44e2-b579-104678a6bb07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.704301] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "c84d15dc-0ef2-44e2-b579-104678a6bb07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.705810] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c74cd18-d4b6-4bb9-b95f-0085c8420b71 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.707661] env[61998]: INFO nova.compute.manager [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Terminating instance [ 858.710038] env[61998]: DEBUG nova.compute.manager [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 858.710038] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 858.710546] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1d4126-4042-410e-8407-0321640392a1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.714464] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 858.714464] env[61998]: value = "task-1388563" [ 858.714464] env[61998]: _type = "Task" [ 858.714464] env[61998]: } to complete.
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.719619] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 858.720121] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3412da3b-7a2d-454b-be23-cb17b94898e2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.725931] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388563, 'name': Rename_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.727775] env[61998]: DEBUG oslo_concurrency.lockutils [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] Releasing lock "refresh_cache-ac4a8463-91ba-4061-aa5d-1c72c4f532ce" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.728070] env[61998]: DEBUG nova.compute.manager [req-a86dd7ed-99d1-4423-bb86-05bc6790fb9d req-8cbabe6c-461e-4fe7-a4dc-01750187d036 service nova] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Received event network-vif-deleted-50c8c3d1-4b6b-4eee-bdc2-574d9ceb20f7 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 858.730622] env[61998]: DEBUG oslo_vmware.api [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){ [ 858.730622] env[61998]: value = "task-1388564" [ 858.730622] env[61998]: _type = "Task" [ 858.730622] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.738439] env[61998]: DEBUG oslo_vmware.api [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388564, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.770042] env[61998]: DEBUG nova.compute.manager [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 858.794312] env[61998]: DEBUG nova.virt.hardware [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 858.794592] env[61998]: DEBUG nova.virt.hardware [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 858.794758] env[61998]: DEBUG nova.virt.hardware [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 858.794941] env[61998]: DEBUG nova.virt.hardware [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 858.795101] env[61998]: DEBUG nova.virt.hardware [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 858.795261] env[61998]: DEBUG nova.virt.hardware [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 858.795480] env[61998]: DEBUG nova.virt.hardware [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 858.795661] env[61998]: DEBUG nova.virt.hardware [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 858.795865] env[61998]: DEBUG nova.virt.hardware [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 858.796082] env[61998]: DEBUG nova.virt.hardware [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 858.796277] env[61998]: DEBUG nova.virt.hardware [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 858.797152] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7761506f-f065-431b-9673-37a8cba62b21 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.805804] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6124a279-6681-43eb-a783-92ab0a7d8716 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.988781] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52774111-57a6-a245-9ec8-1a11b8e0ad75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.153313] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388562, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069835} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.154093] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.154611] env[61998]: DEBUG nova.compute.manager [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 859.157117] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.157688] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.066s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.159339] env[61998]: INFO nova.compute.claims [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.162456] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf8aea1-93ff-469f-8e6d-aa4061bf2d83 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.189756] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] d780cbdc-8838-42bf-8736-bc2dd60e659c/d780cbdc-8838-42bf-8736-bc2dd60e659c.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 859.190658] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b53af72-2538-43d5-bbe4-5cf5d664872f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.213215] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 859.213215] env[61998]: value = "task-1388565" [ 859.213215] env[61998]: _type = "Task" [ 859.213215] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.228273] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388565, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.230257] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388563, 'name': Rename_Task, 'duration_secs': 0.146773} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.230535] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.230797] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc7da5e3-79ce-4ae8-ac3b-92c8ccc52edb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.241987] env[61998]: DEBUG oslo_vmware.api [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388564, 'name': PowerOffVM_Task, 'duration_secs': 0.470162} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.243809] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.243990] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 859.244674] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 859.244674] env[61998]: value = "task-1388566" [ 859.244674] env[61998]: _type = "Task" [ 859.244674] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.244892] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7bf58b4-d638-4865-8748-c5f06af86a19 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.254262] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388566, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.283410] env[61998]: DEBUG nova.compute.manager [req-6b95dafd-767e-42e9-9da9-4bcd50b701cc req-4cef4493-3193-4941-961b-f5f4dcc589db service nova] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Received event network-vif-plugged-294bd0fe-945d-4198-8a3c-13e489ae1134 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 859.283622] env[61998]: DEBUG oslo_concurrency.lockutils [req-6b95dafd-767e-42e9-9da9-4bcd50b701cc req-4cef4493-3193-4941-961b-f5f4dcc589db service nova] Acquiring lock "bcb05670-dc58-46be-a4a9-58a260e4132f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.283829] env[61998]: DEBUG oslo_concurrency.lockutils [req-6b95dafd-767e-42e9-9da9-4bcd50b701cc req-4cef4493-3193-4941-961b-f5f4dcc589db service nova] Lock "bcb05670-dc58-46be-a4a9-58a260e4132f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.283993] env[61998]: DEBUG oslo_concurrency.lockutils [req-6b95dafd-767e-42e9-9da9-4bcd50b701cc req-4cef4493-3193-4941-961b-f5f4dcc589db service nova] Lock "bcb05670-dc58-46be-a4a9-58a260e4132f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.284295] env[61998]: DEBUG nova.compute.manager [req-6b95dafd-767e-42e9-9da9-4bcd50b701cc req-4cef4493-3193-4941-961b-f5f4dcc589db service nova] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] No waiting events found dispatching network-vif-plugged-294bd0fe-945d-4198-8a3c-13e489ae1134 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 859.284531] env[61998]: WARNING nova.compute.manager [req-6b95dafd-767e-42e9-9da9-4bcd50b701cc req-4cef4493-3193-4941-961b-f5f4dcc589db service nova] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Received unexpected event network-vif-plugged-294bd0fe-945d-4198-8a3c-13e489ae1134 for instance with vm_state building and task_state spawning.
[ 859.309667] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 859.309944] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 859.310215] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Deleting the datastore file [datastore2] c84d15dc-0ef2-44e2-b579-104678a6bb07 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.310535] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-138240f6-6c72-47cf-bb19-aef23878f137 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.316511] env[61998]: DEBUG oslo_vmware.api [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for the task: (returnval){ [ 859.316511] env[61998]: value = "task-1388568" [ 859.316511] env[61998]: _type = "Task" [ 859.316511] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.324679] env[61998]: DEBUG oslo_vmware.api [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388568, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.397205] env[61998]: DEBUG nova.network.neutron [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Successfully updated port: 294bd0fe-945d-4198-8a3c-13e489ae1134 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 859.491471] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52774111-57a6-a245-9ec8-1a11b8e0ad75, 'name': SearchDatastore_Task, 'duration_secs': 0.832829} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.493583] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-030221de-1fcb-41e7-b2c2-c356412cb423 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.498638] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 859.498638] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]522d4a79-ae04-7d61-febf-f23e58825423" [ 859.498638] env[61998]: _type = "Task" [ 859.498638] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.506161] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]522d4a79-ae04-7d61-febf-f23e58825423, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.660085] env[61998]: DEBUG nova.compute.utils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 859.661478] env[61998]: DEBUG nova.compute.manager [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 859.661673] env[61998]: DEBUG nova.network.neutron [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 859.704957] env[61998]: DEBUG nova.policy [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dee243f740684fb6b686464f192622a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0b62db0465c54ceb9182aa64a42a7839', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 859.723251] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388565, 'name': ReconfigVM_Task, 'duration_secs': 0.3073} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.723528] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Reconfigured VM instance instance-00000046 to attach disk [datastore1] d780cbdc-8838-42bf-8736-bc2dd60e659c/d780cbdc-8838-42bf-8736-bc2dd60e659c.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 859.724182] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd9c7a6e-d9c3-44e2-a1ad-49b236ad1270 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.730295] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 859.730295] env[61998]: value = "task-1388569" [ 859.730295] env[61998]: _type = "Task" [ 859.730295] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.737883] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388569, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.755960] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388566, 'name': PowerOnVM_Task, 'duration_secs': 0.484346} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.756328] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 859.756544] env[61998]: INFO nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Took 7.63 seconds to spawn the instance on the hypervisor. 
[ 859.756744] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 859.757575] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0801c6b3-1e16-40fa-a185-7d8ed66b5900 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.830268] env[61998]: DEBUG oslo_vmware.api [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Task: {'id': task-1388568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172963} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.831495] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 859.831495] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 859.831495] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 859.831495] env[61998]: INFO nova.compute.manager [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Took 1.12 seconds to destroy the instance on the hypervisor. [ 859.831495] env[61998]: DEBUG oslo.service.loopingcall [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 859.831495] env[61998]: DEBUG nova.compute.manager [-] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 859.832115] env[61998]: DEBUG nova.network.neutron [-] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 859.899541] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Acquiring lock "refresh_cache-bcb05670-dc58-46be-a4a9-58a260e4132f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.899703] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Acquired lock "refresh_cache-bcb05670-dc58-46be-a4a9-58a260e4132f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.899873] env[61998]: DEBUG nova.network.neutron [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 860.009858] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]522d4a79-ae04-7d61-febf-f23e58825423, 'name': SearchDatastore_Task, 'duration_secs': 0.011199} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.010484] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.010484] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] ac4a8463-91ba-4061-aa5d-1c72c4f532ce/ac4a8463-91ba-4061-aa5d-1c72c4f532ce.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 860.010806] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4855a3d-b736-481f-b754-53348de6b6e0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.017338] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 860.017338] env[61998]: value = "task-1388570" [ 860.017338] env[61998]: _type = "Task" [ 860.017338] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.025687] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388570, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.156454] env[61998]: DEBUG nova.network.neutron [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Successfully created port: 381f5c1d-c282-43f5-a1b4-7ef6b8c559bc {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 860.165422] env[61998]: DEBUG nova.compute.manager [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 860.243229] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388569, 'name': Rename_Task, 'duration_secs': 0.166934} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.245973] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 860.246767] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1438feeb-1bc7-4931-92ed-c67a081e302a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.255174] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 860.255174] env[61998]: value = "task-1388571" [ 860.255174] env[61998]: _type = "Task" [ 860.255174] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.273022] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388571, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.286012] env[61998]: INFO nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Took 23.60 seconds to build instance. [ 860.443531] env[61998]: DEBUG nova.network.neutron [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 860.471114] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0de69ca-1ad4-4045-ab1e-44be7366dd05 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.484058] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f40a196-2a87-486a-be0d-c694cb7a74a9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.525351] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ab23c8-0a83-49d8-add6-5217a491f50b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.533556] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388570, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.537040] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edd1fd3-c808-4d77-8746-e8095615ea7a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.554787] env[61998]: DEBUG nova.compute.provider_tree [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.658919] env[61998]: DEBUG nova.network.neutron [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Updating instance_info_cache with network_info: [{"id": "294bd0fe-945d-4198-8a3c-13e489ae1134", "address": "fa:16:3e:0c:58:dd", "network": {"id": "8e8ab3c7-13a9-4062-9041-4a1a0fbed482", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1766003645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68f3a45cd0eb45e0b48b0f4efdcd51a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap294bd0fe-94", "ovs_interfaceid": "294bd0fe-945d-4198-8a3c-13e489ae1134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.765834] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388571, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.787826] env[61998]: DEBUG nova.network.neutron [-] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.789989] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 66.555s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.031284] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388570, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534677} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.031590] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] ac4a8463-91ba-4061-aa5d-1c72c4f532ce/ac4a8463-91ba-4061-aa5d-1c72c4f532ce.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 861.031811] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 861.032097] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e1f8f2b-9792-4ec6-8263-5e76da6b88c0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.040909] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 861.040909] env[61998]: value = "task-1388572" [ 861.040909] env[61998]: _type = "Task" [ 861.040909] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.046573] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388572, 'name': ExtendVirtualDisk_Task} progress is 0%.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.057542] env[61998]: DEBUG nova.scheduler.client.report [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 861.161625] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Releasing lock "refresh_cache-bcb05670-dc58-46be-a4a9-58a260e4132f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.162770] env[61998]: DEBUG nova.compute.manager [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Instance network_info: |[{"id": "294bd0fe-945d-4198-8a3c-13e489ae1134", "address": "fa:16:3e:0c:58:dd", "network": {"id": "8e8ab3c7-13a9-4062-9041-4a1a0fbed482", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1766003645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68f3a45cd0eb45e0b48b0f4efdcd51a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap294bd0fe-94", "ovs_interfaceid": "294bd0fe-945d-4198-8a3c-13e489ae1134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 861.162905] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:58:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '294bd0fe-945d-4198-8a3c-13e489ae1134', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.170318] env[61998]: DEBUG nova.virt.vmwareapi.vm_util 
[None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Creating folder: Project (68f3a45cd0eb45e0b48b0f4efdcd51a3). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.170615] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a4a4ec9-5865-4a59-872f-251108496c53 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.173836] env[61998]: DEBUG nova.compute.manager [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 861.181171] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Created folder: Project (68f3a45cd0eb45e0b48b0f4efdcd51a3) in parent group-v294665. [ 861.181440] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Creating folder: Instances. Parent ref: group-v294722. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.181836] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3cdc85a1-a192-4141-bee9-f0ea4439076a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.192039] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Created folder: Instances in parent group-v294722. [ 861.192303] env[61998]: DEBUG oslo.service.loopingcall [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 861.192531] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 861.192772] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14dcdfa4-2641-4495-a519-bce4593d9829 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.218519] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.218519] env[61998]: value = "task-1388575" [ 861.218519] env[61998]: _type = "Task" [ 861.218519] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.224313] env[61998]: DEBUG nova.virt.hardware [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 861.224563] env[61998]: DEBUG nova.virt.hardware [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 861.224719] env[61998]: DEBUG nova.virt.hardware [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 861.224900] env[61998]: DEBUG nova.virt.hardware [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 861.225106] env[61998]: DEBUG nova.virt.hardware [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 861.225275] env[61998]: DEBUG nova.virt.hardware [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 861.225489] env[61998]: DEBUG nova.virt.hardware [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 861.225647] env[61998]: DEBUG nova.virt.hardware [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 
tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 861.225811] env[61998]: DEBUG nova.virt.hardware [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 861.225973] env[61998]: DEBUG nova.virt.hardware [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 861.226161] env[61998]: DEBUG nova.virt.hardware [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 861.226989] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124fa99d-4b1e-42c9-bd52-16ee7a1539b9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.234492] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388575, 'name': CreateVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.238355] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0be391-6ca5-42eb-820a-aab283821f02 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.265784] env[61998]: DEBUG oslo_vmware.api [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388571, 'name': PowerOnVM_Task, 'duration_secs': 0.588172} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.266074] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 861.266828] env[61998]: INFO nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Took 13.34 seconds to spawn the instance on the hypervisor. 
[ 861.266828] env[61998]: DEBUG nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 861.270109] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abbd7fe-a9a3-4857-8686-b1908b0036b1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.290914] env[61998]: INFO nova.compute.manager [-] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Took 1.46 seconds to deallocate network for instance. [ 861.300018] env[61998]: DEBUG nova.compute.manager [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 861.319773] env[61998]: DEBUG nova.compute.manager [req-afe721b8-71bd-435a-9567-661f1518990e req-a3455d6c-c1d5-4ece-9ac7-e1a7e50ae040 service nova] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Received event network-changed-294bd0fe-945d-4198-8a3c-13e489ae1134 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 861.319966] env[61998]: DEBUG nova.compute.manager [req-afe721b8-71bd-435a-9567-661f1518990e req-a3455d6c-c1d5-4ece-9ac7-e1a7e50ae040 service nova] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Refreshing instance network info cache due to event network-changed-294bd0fe-945d-4198-8a3c-13e489ae1134. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 861.320289] env[61998]: DEBUG oslo_concurrency.lockutils [req-afe721b8-71bd-435a-9567-661f1518990e req-a3455d6c-c1d5-4ece-9ac7-e1a7e50ae040 service nova] Acquiring lock "refresh_cache-bcb05670-dc58-46be-a4a9-58a260e4132f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.320452] env[61998]: DEBUG oslo_concurrency.lockutils [req-afe721b8-71bd-435a-9567-661f1518990e req-a3455d6c-c1d5-4ece-9ac7-e1a7e50ae040 service nova] Acquired lock "refresh_cache-bcb05670-dc58-46be-a4a9-58a260e4132f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.320599] env[61998]: DEBUG nova.network.neutron [req-afe721b8-71bd-435a-9567-661f1518990e req-a3455d6c-c1d5-4ece-9ac7-e1a7e50ae040 service nova] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Refreshing network info cache for port 294bd0fe-945d-4198-8a3c-13e489ae1134 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 861.551287] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388572, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.302101} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.551287] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 861.552110] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1803e4-662e-433d-b31a-f19f21eaff6f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.567112] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.409s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.567616] env[61998]: DEBUG nova.compute.manager [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 861.579794] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] ac4a8463-91ba-4061-aa5d-1c72c4f532ce/ac4a8463-91ba-4061-aa5d-1c72c4f532ce.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 861.580373] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.592s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.584021] env[61998]: INFO nova.compute.claims [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 861.584889] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5e4c84c-4577-4e28-8c4b-9643c0bd969a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.606399] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 861.606399] env[61998]: value = "task-1388576" [ 861.606399] env[61998]: _type = "Task" [ 861.606399] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.616081] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388576, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.732010] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388575, 'name': CreateVM_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.787049] env[61998]: INFO nova.compute.manager [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Took 34.57 seconds to build instance. [ 861.801031] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.823156] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.972503] env[61998]: DEBUG nova.network.neutron [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Successfully updated port: 381f5c1d-c282-43f5-a1b4-7ef6b8c559bc {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 862.086859] env[61998]: DEBUG nova.compute.utils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 862.088739] env[61998]: DEBUG nova.compute.manager [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 862.088910] env[61998]: DEBUG nova.network.neutron [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 862.118193] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388576, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.121669] env[61998]: DEBUG nova.network.neutron [req-afe721b8-71bd-435a-9567-661f1518990e req-a3455d6c-c1d5-4ece-9ac7-e1a7e50ae040 service nova] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Updated VIF entry in instance network info cache for port 294bd0fe-945d-4198-8a3c-13e489ae1134. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 862.122194] env[61998]: DEBUG nova.network.neutron [req-afe721b8-71bd-435a-9567-661f1518990e req-a3455d6c-c1d5-4ece-9ac7-e1a7e50ae040 service nova] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Updating instance_info_cache with network_info: [{"id": "294bd0fe-945d-4198-8a3c-13e489ae1134", "address": "fa:16:3e:0c:58:dd", "network": {"id": "8e8ab3c7-13a9-4062-9041-4a1a0fbed482", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1766003645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68f3a45cd0eb45e0b48b0f4efdcd51a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap294bd0fe-94", "ovs_interfaceid": "294bd0fe-945d-4198-8a3c-13e489ae1134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.143788] env[61998]: DEBUG nova.policy [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc1012da15104300854fc078c7e42b6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2b30526cfa64ab4802e1385aeaf9103', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 862.230173] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "d780cbdc-8838-42bf-8736-bc2dd60e659c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.230498] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388575, 'name': CreateVM_Task, 'duration_secs': 0.781647} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.230671] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 862.231362] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.231576] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.231875] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 862.232191] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78539bfb-e93a-49c0-965b-e150605897cf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.236940] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Waiting for the task: (returnval){ [ 862.236940] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]526d7c0f-2b70-ab20-6bd6-9b6741460632" [ 862.236940] env[61998]: _type = "Task" [ 862.236940] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.247408] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526d7c0f-2b70-ab20-6bd6-9b6741460632, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.293069] env[61998]: DEBUG oslo_concurrency.lockutils [None req-84bcd5e2-37e0-4345-b492-c0044be3d551 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "d780cbdc-8838-42bf-8736-bc2dd60e659c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.078s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.293069] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "d780cbdc-8838-42bf-8736-bc2dd60e659c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.062s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.293069] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "d780cbdc-8838-42bf-8736-bc2dd60e659c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.293069] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "d780cbdc-8838-42bf-8736-bc2dd60e659c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.293345] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "d780cbdc-8838-42bf-8736-bc2dd60e659c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.296027] env[61998]: INFO nova.compute.manager [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Terminating instance [ 862.296879] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.297150] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.297591] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.297845] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.298083] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.299736] env[61998]: DEBUG nova.compute.manager [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 862.299976] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 862.300849] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7a3180-3269-4ce9-863e-2265a833cb06 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.303913] env[61998]: INFO nova.compute.manager [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Terminating instance [ 862.306200] env[61998]: DEBUG nova.compute.manager [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 862.306714] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 862.307618] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5cbb41-743c-44c0-95d9-244a2e525483 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.313681] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 862.314462] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d407d78-6363-455a-833c-027404e0095d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.318333] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 862.319053] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00d911f0-5015-4802-9e83-4a29e9f5fcf4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.322992] env[61998]: DEBUG oslo_vmware.api [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 862.322992] env[61998]: value = "task-1388577" [ 862.322992] env[61998]: _type = "Task" [ 862.322992] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.327778] env[61998]: DEBUG oslo_vmware.api [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 862.327778] env[61998]: value = "task-1388578" [ 862.327778] env[61998]: _type = "Task" [ 862.327778] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.335742] env[61998]: DEBUG oslo_vmware.api [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.342993] env[61998]: DEBUG oslo_vmware.api [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388578, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.475325] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Acquiring lock "refresh_cache-f3089d53-9c8f-4276-8e2e-0518cf29004b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.475508] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Acquired lock "refresh_cache-f3089d53-9c8f-4276-8e2e-0518cf29004b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.476599] env[61998]: DEBUG nova.network.neutron [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 862.501191] env[61998]: DEBUG nova.network.neutron [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Successfully created port: a1959129-1e34-4499-b312-c6580996cd63 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 862.593733] env[61998]: DEBUG nova.compute.manager [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 862.616520] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388576, 'name': ReconfigVM_Task, 'duration_secs': 0.699833} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.617090] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Reconfigured VM instance instance-00000048 to attach disk [datastore1] ac4a8463-91ba-4061-aa5d-1c72c4f532ce/ac4a8463-91ba-4061-aa5d-1c72c4f532ce.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 862.617771] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5203dd6-4a75-42ec-b008-d5ed6924a1a0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.626406] env[61998]: DEBUG oslo_concurrency.lockutils [req-afe721b8-71bd-435a-9567-661f1518990e req-a3455d6c-c1d5-4ece-9ac7-e1a7e50ae040 service nova] Releasing lock "refresh_cache-bcb05670-dc58-46be-a4a9-58a260e4132f" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.627719] env[61998]: DEBUG nova.compute.manager [req-afe721b8-71bd-435a-9567-661f1518990e req-a3455d6c-c1d5-4ece-9ac7-e1a7e50ae040 service nova] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Received event network-vif-deleted-d4d0a8be-1992-48b5-b1ed-4cf17b442314 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 862.627719] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 862.627719] env[61998]: value = "task-1388579" [ 862.627719] env[61998]: _type = "Task" [ 862.627719] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.634909] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388579, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.747761] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526d7c0f-2b70-ab20-6bd6-9b6741460632, 'name': SearchDatastore_Task, 'duration_secs': 0.016655} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.750557] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.750795] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 862.751056] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.751220] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.751407] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 862.751913] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf41edf2-68f7-416a-b3be-2df6bca982cc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.760473] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 862.760650] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 862.763855] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8ebee0b-7d9c-47f0-8f6f-4d6fe12a6032 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.769817] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Waiting for the task: (returnval){ [ 862.769817] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]521c5b38-d052-2f2a-8de9-31a471639207" [ 862.769817] env[61998]: _type = "Task" [ 862.769817] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.778142] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]521c5b38-d052-2f2a-8de9-31a471639207, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.834476] env[61998]: DEBUG oslo_vmware.api [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388577, 'name': PowerOffVM_Task, 'duration_secs': 0.165786} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.837040] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.837221] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.837686] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae9bc205-07f1-43b9-90b4-8d43ce8cacda {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.841683] env[61998]: DEBUG oslo_vmware.api [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388578, 'name': PowerOffVM_Task, 'duration_secs': 0.222578} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.842246] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.842449] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.842704] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79d2f76a-f46b-4bb5-bbfc-9b97681ed327 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.857756] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da0ebed-5f7e-47e6-b9ae-cd22036ae445 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.864757] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3001a01a-c360-4abd-a07c-3a6c8ace1945 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.897329] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1caf8821-f31f-4f8b-84cb-65fd07c1b6e4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.905940] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ad2797-a413-4232-8d3e-12e4fd5b668c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.919762] env[61998]: DEBUG nova.compute.provider_tree [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.922248] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.922464] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.922640] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Deleting the datastore file [datastore1] 
d780cbdc-8838-42bf-8736-bc2dd60e659c {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.923143] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08527866-c91f-4bbc-8f4c-0f510f9f1887 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.931453] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.931682] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.931862] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Deleting the datastore file [datastore1] f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.932129] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f6650c9-f86e-4427-a5c7-ec64cf522bbf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.935480] env[61998]: DEBUG oslo_vmware.api [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 862.935480] env[61998]: value = "task-1388582" [ 862.935480] env[61998]: _type = "Task" [ 862.935480] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.940652] env[61998]: DEBUG oslo_vmware.api [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for the task: (returnval){ [ 862.940652] env[61998]: value = "task-1388583" [ 862.940652] env[61998]: _type = "Task" [ 862.940652] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.946989] env[61998]: DEBUG oslo_vmware.api [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388582, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.952168] env[61998]: DEBUG oslo_vmware.api [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388583, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.022234] env[61998]: DEBUG nova.network.neutron [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 863.137401] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388579, 'name': Rename_Task, 'duration_secs': 0.153436} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.138404] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 863.138623] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51891f52-fe5d-4562-a52f-163810f0cf3f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.144732] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 863.144732] env[61998]: value = "task-1388584" [ 863.144732] env[61998]: _type = "Task" [ 863.144732] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.154410] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388584, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.195249] env[61998]: DEBUG nova.network.neutron [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Updating instance_info_cache with network_info: [{"id": "381f5c1d-c282-43f5-a1b4-7ef6b8c559bc", "address": "fa:16:3e:c8:7f:a6", "network": {"id": "cf2d3673-094a-4ef2-87e9-9cc4479875fb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1600496719-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0b62db0465c54ceb9182aa64a42a7839", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap381f5c1d-c2", "ovs_interfaceid": "381f5c1d-c282-43f5-a1b4-7ef6b8c559bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.280721] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]521c5b38-d052-2f2a-8de9-31a471639207, 'name': SearchDatastore_Task, 'duration_secs': 0.008677} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.281667] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fec34d9-985a-43ab-a096-9fb24e3ddf3a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.287314] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Waiting for the task: (returnval){ [ 863.287314] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5259715c-37e9-2c4d-27f9-2eafa5919c48" [ 863.287314] env[61998]: _type = "Task" [ 863.287314] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.295097] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5259715c-37e9-2c4d-27f9-2eafa5919c48, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.344797] env[61998]: DEBUG nova.compute.manager [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Received event network-vif-plugged-381f5c1d-c282-43f5-a1b4-7ef6b8c559bc {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 863.345049] env[61998]: DEBUG oslo_concurrency.lockutils [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] Acquiring lock "f3089d53-9c8f-4276-8e2e-0518cf29004b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.345311] env[61998]: DEBUG oslo_concurrency.lockutils [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] Lock "f3089d53-9c8f-4276-8e2e-0518cf29004b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.345507] env[61998]: DEBUG oslo_concurrency.lockutils [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] Lock "f3089d53-9c8f-4276-8e2e-0518cf29004b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.345680] env[61998]: DEBUG nova.compute.manager [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] No waiting events found dispatching network-vif-plugged-381f5c1d-c282-43f5-a1b4-7ef6b8c559bc {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 863.346008] env[61998]: WARNING nova.compute.manager [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Received unexpected event network-vif-plugged-381f5c1d-c282-43f5-a1b4-7ef6b8c559bc for instance with vm_state building and task_state spawning. [ 863.346253] env[61998]: DEBUG nova.compute.manager [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Received event network-changed-381f5c1d-c282-43f5-a1b4-7ef6b8c559bc {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 863.346434] env[61998]: DEBUG nova.compute.manager [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Refreshing instance network info cache due to event network-changed-381f5c1d-c282-43f5-a1b4-7ef6b8c559bc. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 863.346604] env[61998]: DEBUG oslo_concurrency.lockutils [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] Acquiring lock "refresh_cache-f3089d53-9c8f-4276-8e2e-0518cf29004b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.426038] env[61998]: DEBUG nova.scheduler.client.report [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 863.448727] env[61998]: DEBUG oslo_vmware.api [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.296334} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.449340] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.449559] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 863.449783] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 863.449970] env[61998]: INFO nova.compute.manager [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 863.450334] env[61998]: DEBUG oslo.service.loopingcall [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 863.450575] env[61998]: DEBUG nova.compute.manager [-] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 863.450709] env[61998]: DEBUG nova.network.neutron [-] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 863.455903] env[61998]: DEBUG oslo_vmware.api [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Task: {'id': task-1388583, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288488} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.456481] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.456644] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 863.456806] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 863.456977] env[61998]: INFO nova.compute.manager [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Took 1.15 seconds to destroy the instance on the hypervisor. [ 863.457221] env[61998]: DEBUG oslo.service.loopingcall [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 863.457981] env[61998]: DEBUG nova.compute.manager [-] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 863.457981] env[61998]: DEBUG nova.network.neutron [-] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 863.603804] env[61998]: DEBUG nova.compute.manager [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 863.652155] env[61998]: DEBUG nova.virt.hardware [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 863.652155] env[61998]: DEBUG nova.virt.hardware [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 863.652155] env[61998]: DEBUG nova.virt.hardware [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.652444] env[61998]: DEBUG nova.virt.hardware [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 863.652444] env[61998]: DEBUG nova.virt.hardware [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.652444] env[61998]: DEBUG nova.virt.hardware [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 863.652716] env[61998]: DEBUG nova.virt.hardware [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 863.652716] env[61998]: DEBUG nova.virt.hardware [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 863.652815] env[61998]: DEBUG nova.virt.hardware [None 
req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 863.652980] env[61998]: DEBUG nova.virt.hardware [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 863.653222] env[61998]: DEBUG nova.virt.hardware [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 863.653967] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fadc6652-f397-49ad-8bea-f4d19679533c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.659478] env[61998]: DEBUG oslo_vmware.api [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388584, 'name': PowerOnVM_Task, 'duration_secs': 0.4963} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.660073] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 863.660281] env[61998]: INFO nova.compute.manager [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Took 9.12 seconds to spawn the instance on the hypervisor. 
[ 863.660458] env[61998]: DEBUG nova.compute.manager [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 863.661784] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfe17d0-61ad-4073-bcef-1d56ed088d00 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.668194] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a437f4-76f9-4d44-bb6c-acf3676c22ca {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.699147] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Releasing lock "refresh_cache-f3089d53-9c8f-4276-8e2e-0518cf29004b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.701503] env[61998]: DEBUG nova.compute.manager [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Instance network_info: |[{"id": "381f5c1d-c282-43f5-a1b4-7ef6b8c559bc", "address": "fa:16:3e:c8:7f:a6", "network": {"id": "cf2d3673-094a-4ef2-87e9-9cc4479875fb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1600496719-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0b62db0465c54ceb9182aa64a42a7839", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap381f5c1d-c2", "ovs_interfaceid": "381f5c1d-c282-43f5-a1b4-7ef6b8c559bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 863.701503] env[61998]: DEBUG oslo_concurrency.lockutils [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] Acquired lock "refresh_cache-f3089d53-9c8f-4276-8e2e-0518cf29004b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.701830] env[61998]: DEBUG nova.network.neutron [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Refreshing network info cache for port 381f5c1d-c282-43f5-a1b4-7ef6b8c559bc {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 863.701830] 
env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:7f:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69f65356-c85e-4b7f-ad28-7c7b5e8cf50c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '381f5c1d-c282-43f5-a1b4-7ef6b8c559bc', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 863.708499] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Creating folder: Project (0b62db0465c54ceb9182aa64a42a7839). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.709228] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e046e11e-002b-45d1-acb8-be5412574814 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.721360] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Created folder: Project (0b62db0465c54ceb9182aa64a42a7839) in parent group-v294665. [ 863.721583] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Creating folder: Instances. Parent ref: group-v294725. 
{{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.723279] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-089bff0f-15ec-4a29-9ac0-44236adc7157 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.726397] env[61998]: DEBUG nova.compute.manager [req-83564cde-6159-47bd-b004-b98f61ca100b req-ddf262df-8107-43ff-8f46-ddeb609ffffc service nova] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Received event network-vif-deleted-994d79bc-7e50-47c8-9a8c-1f381d9d3fe8 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 863.726397] env[61998]: INFO nova.compute.manager [req-83564cde-6159-47bd-b004-b98f61ca100b req-ddf262df-8107-43ff-8f46-ddeb609ffffc service nova] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Neutron deleted interface 994d79bc-7e50-47c8-9a8c-1f381d9d3fe8; detaching it from the instance and deleting it from the info cache [ 863.726543] env[61998]: DEBUG nova.network.neutron [req-83564cde-6159-47bd-b004-b98f61ca100b req-ddf262df-8107-43ff-8f46-ddeb609ffffc service nova] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.736829] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Created folder: Instances in parent group-v294725. [ 863.737022] env[61998]: DEBUG oslo.service.loopingcall [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 863.737621] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 863.737835] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abfc3a2f-529c-4776-bd08-115a9c52877f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.757286] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 863.757286] env[61998]: value = "task-1388587" [ 863.757286] env[61998]: _type = "Task" [ 863.757286] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.766734] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388587, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.796768] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5259715c-37e9-2c4d-27f9-2eafa5919c48, 'name': SearchDatastore_Task, 'duration_secs': 0.027979} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.797705] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.797705] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] bcb05670-dc58-46be-a4a9-58a260e4132f/bcb05670-dc58-46be-a4a9-58a260e4132f.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 863.797705] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-490daab6-b920-41dc-94a7-9354eeaf6f04 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.803659] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Waiting for the task: (returnval){ [ 863.803659] env[61998]: value = "task-1388588" [ 863.803659] env[61998]: _type = "Task" [ 863.803659] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.811015] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388588, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.927779] env[61998]: DEBUG nova.compute.manager [req-a68348cc-6943-4a54-aeec-67fcaf1e5412 req-b746c307-d982-4e80-a75a-c86559af7e31 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Received event network-vif-plugged-a1959129-1e34-4499-b312-c6580996cd63 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 863.928329] env[61998]: DEBUG oslo_concurrency.lockutils [req-a68348cc-6943-4a54-aeec-67fcaf1e5412 req-b746c307-d982-4e80-a75a-c86559af7e31 service nova] Acquiring lock "4ca7de74-3bcb-4da0-a2e1-573584467cc9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.928727] env[61998]: DEBUG oslo_concurrency.lockutils [req-a68348cc-6943-4a54-aeec-67fcaf1e5412 req-b746c307-d982-4e80-a75a-c86559af7e31 service nova] Lock "4ca7de74-3bcb-4da0-a2e1-573584467cc9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.928962] env[61998]: DEBUG oslo_concurrency.lockutils [req-a68348cc-6943-4a54-aeec-67fcaf1e5412 req-b746c307-d982-4e80-a75a-c86559af7e31 service nova] Lock "4ca7de74-3bcb-4da0-a2e1-573584467cc9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.929233] env[61998]: DEBUG nova.compute.manager [req-a68348cc-6943-4a54-aeec-67fcaf1e5412 req-b746c307-d982-4e80-a75a-c86559af7e31 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] No waiting events found dispatching network-vif-plugged-a1959129-1e34-4499-b312-c6580996cd63 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 863.929452] env[61998]: WARNING nova.compute.manager [req-a68348cc-6943-4a54-aeec-67fcaf1e5412 req-b746c307-d982-4e80-a75a-c86559af7e31 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Received unexpected event network-vif-plugged-a1959129-1e34-4499-b312-c6580996cd63 for instance with vm_state building and task_state spawning. [ 863.930794] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.931702] env[61998]: DEBUG nova.compute.manager [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 863.935082] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.002s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.937241] env[61998]: INFO nova.compute.claims [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.187209] env[61998]: INFO nova.compute.manager [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Took 25.54 seconds to build instance. [ 864.209228] env[61998]: DEBUG nova.network.neutron [-] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.210932] env[61998]: DEBUG nova.network.neutron [-] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.232337] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64baab23-0a8b-4af5-87e1-82473ee4cf2a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.241722] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce236603-c964-4e7a-bf70-38116b0a481d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.273745] env[61998]: DEBUG nova.compute.manager [req-83564cde-6159-47bd-b004-b98f61ca100b req-ddf262df-8107-43ff-8f46-ddeb609ffffc service nova] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Detach interface failed, port_id=994d79bc-7e50-47c8-9a8c-1f381d9d3fe8, reason: Instance f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83 could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 864.277275] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388587, 'name': CreateVM_Task, 'duration_secs': 0.35681} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.277491] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 864.278291] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.278413] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.279410] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 864.279410] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e67cb8b8-50f3-4431-86c6-b63da1038180 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.283962] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Waiting for the task: (returnval){ [ 864.283962] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52354575-7368-ad59-b6c1-6ae435e11630" [ 864.283962] env[61998]: _type = "Task" [ 864.283962] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.293477] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52354575-7368-ad59-b6c1-6ae435e11630, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.312318] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388588, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.451361} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.312576] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] bcb05670-dc58-46be-a4a9-58a260e4132f/bcb05670-dc58-46be-a4a9-58a260e4132f.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 864.312829] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 864.313071] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64f18330-ac87-4c1b-9b90-621ff5c97403 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.320134] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Waiting for the task: (returnval){ [ 864.320134] env[61998]: value = "task-1388589" [ 864.320134] env[61998]: _type = "Task" [ 864.320134] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.329914] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388589, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.446742] env[61998]: DEBUG nova.compute.utils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 864.448109] env[61998]: DEBUG nova.compute.manager [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 864.448351] env[61998]: DEBUG nova.network.neutron [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.482382] env[61998]: DEBUG nova.network.neutron [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Updated VIF entry in instance network info cache for port 381f5c1d-c282-43f5-a1b4-7ef6b8c559bc. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 864.482731] env[61998]: DEBUG nova.network.neutron [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Updating instance_info_cache with network_info: [{"id": "381f5c1d-c282-43f5-a1b4-7ef6b8c559bc", "address": "fa:16:3e:c8:7f:a6", "network": {"id": "cf2d3673-094a-4ef2-87e9-9cc4479875fb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1600496719-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0b62db0465c54ceb9182aa64a42a7839", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap381f5c1d-c2", "ovs_interfaceid": "381f5c1d-c282-43f5-a1b4-7ef6b8c559bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.494603] env[61998]: DEBUG nova.policy [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '264431d91ffa4074ab7e9e6fc562616a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f7b0f9307923448bbd7b245df28f97f1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 864.541099] env[61998]: DEBUG nova.network.neutron [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Successfully updated port: a1959129-1e34-4499-b312-c6580996cd63 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 864.689396] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5b861d81-6d51-44c6-82ca-9c90f72f310a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "ac4a8463-91ba-4061-aa5d-1c72c4f532ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.252s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.715080] env[61998]: INFO nova.compute.manager [-] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Took 1.26 seconds to deallocate network for instance. [ 864.715479] env[61998]: INFO nova.compute.manager [-] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Took 1.26 seconds to deallocate network for instance. 
[ 864.795254] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52354575-7368-ad59-b6c1-6ae435e11630, 'name': SearchDatastore_Task, 'duration_secs': 0.00903} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.795563] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.795794] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.796028] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.796175] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.796346] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.796614] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6787d9f4-d39b-498f-9a30-4fbdf5eac87f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.805156] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.805340] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 864.806068] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a7bf44b-6ba5-4415-b570-ae42ac25119e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.813015] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Waiting for the task: (returnval){ [ 864.813015] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52e31b60-4cd6-ceda-d06b-0028eb6ddab0" [ 864.813015] env[61998]: _type = "Task" [ 864.813015] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.820751] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52e31b60-4cd6-ceda-d06b-0028eb6ddab0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.833287] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388589, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065115} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.833539] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 864.834325] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f6a7b3-924f-4bb0-8d1d-acf4ac42baf8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.856681] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] bcb05670-dc58-46be-a4a9-58a260e4132f/bcb05670-dc58-46be-a4a9-58a260e4132f.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 864.857322] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-caf28fbd-c720-41e0-a949-cd98faa55b52 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.880726] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Waiting for the task: (returnval){ [ 864.880726] env[61998]: value = 
"task-1388590" [ 864.880726] env[61998]: _type = "Task" [ 864.880726] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.889273] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388590, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.952260] env[61998]: DEBUG nova.compute.manager [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 864.985753] env[61998]: DEBUG oslo_concurrency.lockutils [req-f98f18d9-9d8d-4013-8214-0cc2da2d24e6 req-6158b408-ed95-4ebe-88b8-0fc66f28a18d service nova] Releasing lock "refresh_cache-f3089d53-9c8f-4276-8e2e-0518cf29004b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.046145] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Acquiring lock "refresh_cache-4ca7de74-3bcb-4da0-a2e1-573584467cc9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.046227] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Acquired lock "refresh_cache-4ca7de74-3bcb-4da0-a2e1-573584467cc9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.046384] env[61998]: DEBUG nova.network.neutron [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.065493] env[61998]: DEBUG nova.network.neutron [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Successfully created port: 2c555663-7a18-4eba-9038-f975654d0400 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.184757] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bda1996-1040-464c-bf14-e6f9ecdc772e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.192479] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60d52f3-9074-4581-9b60-c4ffd30b21a6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.226102] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.226883] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.227819] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7199c44-401c-40d2-a0a2-8f0e26643a01 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.235629] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b6c83f-92ad-4ee0-b59e-aef12c4d211b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.249261] env[61998]: DEBUG nova.compute.provider_tree [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.323411] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52e31b60-4cd6-ceda-d06b-0028eb6ddab0, 'name': SearchDatastore_Task, 'duration_secs': 0.008155} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.324199] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa457743-4535-442a-8606-fba0e93c8da4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.331293] env[61998]: DEBUG oslo_concurrency.lockutils [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "ac4a8463-91ba-4061-aa5d-1c72c4f532ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.331506] env[61998]: DEBUG oslo_concurrency.lockutils [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "ac4a8463-91ba-4061-aa5d-1c72c4f532ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.331659] env[61998]: DEBUG oslo_concurrency.lockutils [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "ac4a8463-91ba-4061-aa5d-1c72c4f532ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.331872] env[61998]: DEBUG oslo_concurrency.lockutils [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "ac4a8463-91ba-4061-aa5d-1c72c4f532ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.332278] env[61998]: DEBUG oslo_concurrency.lockutils [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "ac4a8463-91ba-4061-aa5d-1c72c4f532ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.334096] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Waiting for the task: (returnval){ [ 865.334096] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52707ce9-13d0-9a04-bc25-e6b672ddf773" [ 865.334096] env[61998]: _type = "Task" [ 865.334096] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.334560] env[61998]: INFO nova.compute.manager [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Terminating instance [ 865.336486] env[61998]: DEBUG nova.compute.manager [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 865.336669] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 865.340729] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c26b77c-7104-4b98-864c-5f43b6d25347 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.348095] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 865.351164] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-785f9dea-1dbb-41d2-b379-8c7726704255 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.352696] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52707ce9-13d0-9a04-bc25-e6b672ddf773, 'name': SearchDatastore_Task, 'duration_secs': 0.010209} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.352997] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.353218] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] f3089d53-9c8f-4276-8e2e-0518cf29004b/f3089d53-9c8f-4276-8e2e-0518cf29004b.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 865.353764] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42814aad-cce7-4b78-be67-971a90b44a23 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.358909] env[61998]: DEBUG oslo_vmware.api [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 865.358909] env[61998]: value = "task-1388591" [ 865.358909] env[61998]: _type = "Task" [ 865.358909] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.363313] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Waiting for the task: (returnval){ [ 865.363313] env[61998]: value = "task-1388592" [ 865.363313] env[61998]: _type = "Task" [ 865.363313] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.369750] env[61998]: DEBUG oslo_vmware.api [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388591, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.375437] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388592, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.378056] env[61998]: DEBUG nova.compute.manager [req-022b48b9-7067-4c85-b4eb-8ae4e779f3f5 req-72a2b60e-d65f-4224-bb19-e07af9a483f5 service nova] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Received event network-vif-deleted-a6cc2f5c-f8f0-4800-9361-1ecc4455015b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 865.378225] env[61998]: DEBUG nova.compute.manager [req-022b48b9-7067-4c85-b4eb-8ae4e779f3f5 req-72a2b60e-d65f-4224-bb19-e07af9a483f5 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Received event network-changed-a1959129-1e34-4499-b312-c6580996cd63 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 865.378389] env[61998]: DEBUG nova.compute.manager [req-022b48b9-7067-4c85-b4eb-8ae4e779f3f5 req-72a2b60e-d65f-4224-bb19-e07af9a483f5 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Refreshing instance network info cache due to event network-changed-a1959129-1e34-4499-b312-c6580996cd63. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 865.378598] env[61998]: DEBUG oslo_concurrency.lockutils [req-022b48b9-7067-4c85-b4eb-8ae4e779f3f5 req-72a2b60e-d65f-4224-bb19-e07af9a483f5 service nova] Acquiring lock "refresh_cache-4ca7de74-3bcb-4da0-a2e1-573584467cc9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.390752] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388590, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.586316] env[61998]: DEBUG nova.network.neutron [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.752216] env[61998]: DEBUG nova.scheduler.client.report [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 865.805013] env[61998]: DEBUG nova.network.neutron [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Updating instance_info_cache with network_info: [{"id": "a1959129-1e34-4499-b312-c6580996cd63", "address": "fa:16:3e:82:ed:f8", "network": {"id": "2d9ba45d-5e99-4c38-80b2-6b9ca58acf95", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1139375101-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2b30526cfa64ab4802e1385aeaf9103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1959129-1e", "ovs_interfaceid": "a1959129-1e34-4499-b312-c6580996cd63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.876772] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388592, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459285} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.876772] env[61998]: DEBUG oslo_vmware.api [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388591, 'name': PowerOffVM_Task, 'duration_secs': 0.200287} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.876772] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] f3089d53-9c8f-4276-8e2e-0518cf29004b/f3089d53-9c8f-4276-8e2e-0518cf29004b.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.876772] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.877057] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 865.877057] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 865.877359] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64a3c196-cb95-4d3c-b7cd-ed706e976fd5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.879127] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f88621dc-0cde-4ab7-b74b-8b501aba1e30 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.888182] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Waiting for the task: (returnval){ [ 865.888182] env[61998]: value = "task-1388593" [ 865.888182] env[61998]: _type = "Task" [ 865.888182] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.894691] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388590, 'name': ReconfigVM_Task, 'duration_secs': 0.649825} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.895695] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Reconfigured VM instance instance-00000049 to attach disk [datastore1] bcb05670-dc58-46be-a4a9-58a260e4132f/bcb05670-dc58-46be-a4a9-58a260e4132f.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.896395] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-615381d3-3ee9-4391-9cc4-d7ca4ef6fddb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.900664] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388593, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.906241] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Waiting for the task: (returnval){ [ 865.906241] env[61998]: value = "task-1388595" [ 865.906241] env[61998]: _type = "Task" [ 865.906241] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.915171] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388595, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.965628] env[61998]: DEBUG nova.compute.manager [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 865.990860] env[61998]: DEBUG nova.virt.hardware [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 865.991118] env[61998]: DEBUG nova.virt.hardware [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 865.991280] env[61998]: DEBUG nova.virt.hardware [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.991459] env[61998]: DEBUG nova.virt.hardware [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 865.991605] env[61998]: DEBUG nova.virt.hardware [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.991749] env[61998]: DEBUG nova.virt.hardware [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 865.991952] env[61998]: DEBUG nova.virt.hardware [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 865.992124] env[61998]: DEBUG nova.virt.hardware [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 865.992326] env[61998]: DEBUG nova.virt.hardware [None 
req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 865.992487] env[61998]: DEBUG nova.virt.hardware [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 865.992664] env[61998]: DEBUG nova.virt.hardware [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 865.993528] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40875f52-059a-4ef1-b701-b85c949bfe0e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.001678] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcd6139-4f51-4c8a-b570-7567efec697f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.257434] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.322s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.257988] env[61998]: DEBUG nova.compute.manager [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 866.260566] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.880s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.261300] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.261300] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61998) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 866.261300] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.463s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.262643] env[61998]: INFO nova.compute.claims [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 866.267115] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec080bdb-f2a5-46e7-9f3d-13de2ed54c34 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.279552] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13b9766-1aa1-4aca-8f0d-34b30c4a414f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.298128] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0964995f-0685-4d73-b28d-0c87a42ad60c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.307623] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbda8394-9fd1-4717-aac4-4edc80da7043 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.311241] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Releasing lock "refresh_cache-4ca7de74-3bcb-4da0-a2e1-573584467cc9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.311545] env[61998]: DEBUG nova.compute.manager [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 
4ca7de74-3bcb-4da0-a2e1-573584467cc9] Instance network_info: |[{"id": "a1959129-1e34-4499-b312-c6580996cd63", "address": "fa:16:3e:82:ed:f8", "network": {"id": "2d9ba45d-5e99-4c38-80b2-6b9ca58acf95", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1139375101-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2b30526cfa64ab4802e1385aeaf9103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1959129-1e", "ovs_interfaceid": "a1959129-1e34-4499-b312-c6580996cd63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 866.312095] env[61998]: DEBUG oslo_concurrency.lockutils [req-022b48b9-7067-4c85-b4eb-8ae4e779f3f5 req-72a2b60e-d65f-4224-bb19-e07af9a483f5 service nova] Acquired lock "refresh_cache-4ca7de74-3bcb-4da0-a2e1-573584467cc9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.312301] env[61998]: DEBUG nova.network.neutron [req-022b48b9-7067-4c85-b4eb-8ae4e779f3f5 req-72a2b60e-d65f-4224-bb19-e07af9a483f5 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Refreshing network info cache for port a1959129-1e34-4499-b312-c6580996cd63 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 866.313382] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:ed:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea4a9e02-45f1-4afb-8abb-0de26b153086', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1959129-1e34-4499-b312-c6580996cd63', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.320799] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Creating folder: Project (a2b30526cfa64ab4802e1385aeaf9103). Parent ref: group-v294665. 
{{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 866.324241] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11e9ca66-9dfe-47eb-9b36-e401d8450616 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.355054] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181127MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61998) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 866.355363] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.367793] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Created folder: Project (a2b30526cfa64ab4802e1385aeaf9103) in parent group-v294665. [ 866.368098] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Creating folder: Instances. Parent ref: group-v294728. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 866.368619] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa4c9fc0-f2cd-4286-8eb1-4b2aff6ae1a9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.380016] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Created folder: Instances in parent group-v294728. [ 866.380304] env[61998]: DEBUG oslo.service.loopingcall [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.380481] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 866.380774] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af45ad09-94ae-42f2-8f5a-f0fad2032cc4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.409525] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065396} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.414118] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 866.414481] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.414481] env[61998]: value = "task-1388598" [ 866.414481] env[61998]: _type = "Task" [ 866.414481] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.415161] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fd2115-ff44-4a7a-b16a-5122aeb0ed0a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.425168] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388595, 'name': Rename_Task, 'duration_secs': 0.164813} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.426547] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 866.426817] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38b77038-aa4a-4921-96a1-68b2934fdfb4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.446747] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] f3089d53-9c8f-4276-8e2e-0518cf29004b/f3089d53-9c8f-4276-8e2e-0518cf29004b.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 866.449094] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9a3e9dc-17e4-40f6-acfc-f065f804133b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.467259] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Waiting for the task: (returnval){ [ 866.467259] env[61998]: value = "task-1388599" [ 866.467259] env[61998]: _type = "Task" [ 866.467259] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.471116] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388598, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.476236] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Waiting for the task: (returnval){ [ 866.476236] env[61998]: value = "task-1388600" [ 866.476236] env[61998]: _type = "Task" [ 866.476236] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.482339] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388599, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.487715] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388600, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.741211] env[61998]: DEBUG nova.network.neutron [req-022b48b9-7067-4c85-b4eb-8ae4e779f3f5 req-72a2b60e-d65f-4224-bb19-e07af9a483f5 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Updated VIF entry in instance network info cache for port a1959129-1e34-4499-b312-c6580996cd63. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 866.741575] env[61998]: DEBUG nova.network.neutron [req-022b48b9-7067-4c85-b4eb-8ae4e779f3f5 req-72a2b60e-d65f-4224-bb19-e07af9a483f5 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Updating instance_info_cache with network_info: [{"id": "a1959129-1e34-4499-b312-c6580996cd63", "address": "fa:16:3e:82:ed:f8", "network": {"id": "2d9ba45d-5e99-4c38-80b2-6b9ca58acf95", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1139375101-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2b30526cfa64ab4802e1385aeaf9103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1959129-1e", "ovs_interfaceid": "a1959129-1e34-4499-b312-c6580996cd63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.768021] env[61998]: DEBUG nova.compute.utils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} 
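The recurring "Task: {'id': task-…} progress is N%" entries are emitted by oslo.vmware's task poller (_poll_task / wait_for_task). A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials, and vm_ref below are placeholders, not values from this log:

```python
# Sketch of the oslo.vmware task-polling pattern behind the
# "_poll_task ... progress is N%" lines; placeholders throughout.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = ...  # placeholder: a VirtualMachine managed-object reference

# PowerOnVM_Task returns a Task moref; wait_for_task() polls it until it
# reaches 'success' (returning the task info) or raises on 'error'. Each
# poll cycle produces one progress line like those above.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task_ref)
```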
[ 866.770525] env[61998]: DEBUG nova.compute.manager [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 866.770710] env[61998]: DEBUG nova.network.neutron [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 866.803125] env[61998]: DEBUG nova.network.neutron [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Successfully updated port: 2c555663-7a18-4eba-9038-f975654d0400 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.833483] env[61998]: DEBUG nova.policy [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '216727fb9a78443b99ec1bc4ca360709', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fefd01743e22471e81b557492839cf5c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 866.929851] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388598, 'name': CreateVM_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.984497] env[61998]: DEBUG oslo_vmware.api [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388599, 'name': PowerOnVM_Task, 'duration_secs': 0.523888} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.985732] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.986095] env[61998]: INFO nova.compute.manager [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Took 8.22 seconds to spawn the instance on the hypervisor. 
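The Acquiring/acquired/"released" lock entries (for example around "compute_resources" and the "refresh_cache-…" names) come from oslo.concurrency, which logs the waited and held durations itself. A small sketch of both forms, with illustrative lock names and function bodies:

```python
# Sketch of the oslo.concurrency usage behind the lock DEBUG entries;
# the decorated function and lock names here are illustrative.
from oslo_concurrency import lockutils

synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('compute_resources')
def instance_claim(context, instance):
    # Runs with the named semaphore held; the 'Acquiring lock ... by',
    # 'acquired ... waited' and '"released" ... held' lines are emitted
    # by the lockutils wrapper around this call, not by the body.
    pass

# The context-manager form yields the Acquiring/Releasing pairs seen for
# the "refresh_cache-<uuid>" locks:
with lockutils.lock('refresh_cache-<uuid>', external=False):
    pass
```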
[ 866.986420] env[61998]: DEBUG nova.compute.manager [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 866.987314] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685445f6-cc40-4177-833e-56de856e3038 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.993592] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388600, 'name': ReconfigVM_Task, 'duration_secs': 0.397188} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.994551] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Reconfigured VM instance instance-0000004a to attach disk [datastore2] f3089d53-9c8f-4276-8e2e-0518cf29004b/f3089d53-9c8f-4276-8e2e-0518cf29004b.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.995356] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04f4577f-f0a6-43e9-80f9-7826cee568f4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.008704] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Waiting for the task: (returnval){ [ 867.008704] env[61998]: value = "task-1388601" [ 867.008704] env[61998]: _type = "Task" [ 867.008704] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.020550] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388601, 'name': Rename_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.212435] env[61998]: DEBUG nova.network.neutron [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Successfully created port: c8d6d30a-6ea6-4e25-b740-f859a1482020 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 867.244846] env[61998]: DEBUG oslo_concurrency.lockutils [req-022b48b9-7067-4c85-b4eb-8ae4e779f3f5 req-72a2b60e-d65f-4224-bb19-e07af9a483f5 service nova] Releasing lock "refresh_cache-4ca7de74-3bcb-4da0-a2e1-573584467cc9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.279128] env[61998]: DEBUG nova.compute.manager [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 867.308426] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "refresh_cache-0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.308586] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquired lock "refresh_cache-0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.308736] env[61998]: DEBUG nova.network.neutron [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.415858] env[61998]: DEBUG nova.compute.manager [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Received event network-vif-plugged-2c555663-7a18-4eba-9038-f975654d0400 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 867.416096] env[61998]: DEBUG oslo_concurrency.lockutils [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] Acquiring lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.416315] env[61998]: DEBUG oslo_concurrency.lockutils [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.416479] env[61998]: DEBUG oslo_concurrency.lockutils
[req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.416777] env[61998]: DEBUG nova.compute.manager [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] No waiting events found dispatching network-vif-plugged-2c555663-7a18-4eba-9038-f975654d0400 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 867.416827] env[61998]: WARNING nova.compute.manager [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Received unexpected event network-vif-plugged-2c555663-7a18-4eba-9038-f975654d0400 for instance with vm_state building and task_state spawning. [ 867.416961] env[61998]: DEBUG nova.compute.manager [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Received event network-changed-2c555663-7a18-4eba-9038-f975654d0400 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 867.417222] env[61998]: DEBUG nova.compute.manager [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Refreshing instance network info cache due to event network-changed-2c555663-7a18-4eba-9038-f975654d0400. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 867.417414] env[61998]: DEBUG oslo_concurrency.lockutils [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] Acquiring lock "refresh_cache-0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.429265] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388598, 'name': CreateVM_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.514012] env[61998]: INFO nova.compute.manager [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Took 27.47 seconds to build instance. [ 867.520692] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388601, 'name': Rename_Task, 'duration_secs': 0.41576} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.521720] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 867.522547] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e83d43-0139-4e3d-9ff2-4cd517773838 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.524897] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a3885ab-1209-4e56-ba32-b32e410810cd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.530913] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3428e90-e4bc-4cd8-b206-f7a7e3480f59 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.534812] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Waiting for the task: (returnval){ [ 867.534812] env[61998]: value = "task-1388602" [ 867.534812] env[61998]: _type = "Task" [ 867.534812] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.562937] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef151db-71c9-4a2a-9048-7fa37c47f954 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.568178] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388602, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.572745] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa315b34-2c9c-48a8-83d6-78bc90a157b0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.587274] env[61998]: DEBUG nova.compute.provider_tree [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.842411] env[61998]: DEBUG nova.network.neutron [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 867.928448] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388598, 'name': CreateVM_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.017320] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9477b457-50bf-4e5d-8dc1-f59af6cb6cfe tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Lock "bcb05670-dc58-46be-a4a9-58a260e4132f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 58.909s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.035407] env[61998]: DEBUG nova.network.neutron [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Updating instance_info_cache with network_info: [{"id": "2c555663-7a18-4eba-9038-f975654d0400", "address": "fa:16:3e:ea:aa:ac", "network": {"id": "017a5b7a-4001-4ccd-8656-94c62da1b694", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1740525260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f7b0f9307923448bbd7b245df28f97f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c555663-7a", "ovs_interfaceid": "2c555663-7a18-4eba-9038-f975654d0400", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.048547] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388602, 'name': PowerOnVM_Task} progress is 100%.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.090443] env[61998]: DEBUG nova.scheduler.client.report [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 868.292280] env[61998]: DEBUG nova.compute.manager [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 868.317792] env[61998]: DEBUG nova.virt.hardware [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 868.318124] env[61998]: DEBUG nova.virt.hardware [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 868.318289] env[61998]: DEBUG nova.virt.hardware [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 868.318415] env[61998]: DEBUG nova.virt.hardware [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 868.319051] env[61998]: DEBUG nova.virt.hardware [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 868.319051] env[61998]: DEBUG nova.virt.hardware [None 
req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 868.319051] env[61998]: DEBUG nova.virt.hardware [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 868.319051] env[61998]: DEBUG nova.virt.hardware [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 868.319266] env[61998]: DEBUG nova.virt.hardware [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 868.319464] env[61998]: DEBUG nova.virt.hardware [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 868.319512] env[61998]: DEBUG nova.virt.hardware [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 868.320388] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dda7982-d682-487e-bccf-faffa90f38ee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.328227] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5137df61-f5a7-4736-8374-e09c6ec61b92 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.428822] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388598, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.535808] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 868.536050] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 868.536241] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleting the datastore file [datastore1] ac4a8463-91ba-4061-aa5d-1c72c4f532ce {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 868.536506] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce4d2009-fbd7-4ac1-a85f-b06d24833773 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.543046] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Releasing lock "refresh_cache-0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.543343] env[61998]: DEBUG nova.compute.manager [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Instance network_info: |[{"id": "2c555663-7a18-4eba-9038-f975654d0400", "address": "fa:16:3e:ea:aa:ac", "network": {"id": "017a5b7a-4001-4ccd-8656-94c62da1b694", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1740525260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f7b0f9307923448bbd7b245df28f97f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c555663-7a", "ovs_interfaceid": "2c555663-7a18-4eba-9038-f975654d0400", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 868.546858] env[61998]: DEBUG oslo_concurrency.lockutils [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] Acquired lock 
"refresh_cache-0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.547054] env[61998]: DEBUG nova.network.neutron [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Refreshing network info cache for port 2c555663-7a18-4eba-9038-f975654d0400 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.548212] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:aa:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8abee039-d93e-48a7-8911-6416a3e1ff30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c555663-7a18-4eba-9038-f975654d0400', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 868.555589] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Creating folder: Project (f7b0f9307923448bbd7b245df28f97f1). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 868.555867] env[61998]: DEBUG oslo_vmware.api [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388602, 'name': PowerOnVM_Task, 'duration_secs': 0.549066} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.559803] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a5385fe-006d-4c3e-acda-a78f647a0e35 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.561376] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 868.561578] env[61998]: INFO nova.compute.manager [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Took 7.39 seconds to spawn the instance on the hypervisor. 
[ 868.561757] env[61998]: DEBUG nova.compute.manager [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 868.562379] env[61998]: DEBUG oslo_vmware.api [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 868.562379] env[61998]: value = "task-1388603" [ 868.562379] env[61998]: _type = "Task" [ 868.562379] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.563074] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8d68fc-aaed-4567-bc42-1d862fc81029 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.579743] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Created folder: Project (f7b0f9307923448bbd7b245df28f97f1) in parent group-v294665. [ 868.579908] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Creating folder: Instances. Parent ref: group-v294731. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 868.580155] env[61998]: DEBUG oslo_vmware.api [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388603, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.580738] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ffa4ca27-650d-4b36-a7e1-2f4f787802ee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.589727] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Created folder: Instances in parent group-v294731. [ 868.589948] env[61998]: DEBUG oslo.service.loopingcall [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 868.590144] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 868.590331] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc1f3f6a-a6ec-4219-8d7d-42b1f9fba67f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.607548] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.346s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.608203] env[61998]: DEBUG nova.compute.manager [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 868.611811] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.418s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.612011] env[61998]: DEBUG nova.objects.instance [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61998) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 868.618617] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 868.618617] env[61998]: value = "task-1388606" [ 868.618617] env[61998]: _type = "Task" [ 868.618617] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.626662] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388606, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.719978] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Acquiring lock "bcb05670-dc58-46be-a4a9-58a260e4132f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.720343] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Lock "bcb05670-dc58-46be-a4a9-58a260e4132f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.720623] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Acquiring lock "bcb05670-dc58-46be-a4a9-58a260e4132f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.720874] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Lock "bcb05670-dc58-46be-a4a9-58a260e4132f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.721116] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Lock "bcb05670-dc58-46be-a4a9-58a260e4132f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.725758] env[61998]: INFO nova.compute.manager [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Terminating instance [ 868.728152] env[61998]: DEBUG nova.compute.manager [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Start destroying the instance on the hypervisor.
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 868.728370] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 868.729420] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a5bb71-87e1-48b2-a8d8-c9491ca16665 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.737277] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.737592] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9b73f2e-8567-4186-a12b-c97af86aeeee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.744569] env[61998]: DEBUG oslo_vmware.api [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Waiting for the task: (returnval){ [ 868.744569] env[61998]: value = "task-1388607" [ 868.744569] env[61998]: _type = "Task" [ 868.744569] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.752530] env[61998]: DEBUG oslo_vmware.api [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388607, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.931802] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388598, 'name': CreateVM_Task, 'duration_secs': 2.306715} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.936280] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.937512] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.937709] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.938096] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 868.938661] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a85376dc-f4e4-4a4d-a202-fa019ac418ed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.944216] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Waiting for the task: (returnval){ [ 868.944216] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]521756d9-bacd-3806-6a70-45c0afcb1b71" [ 868.944216] env[61998]: _type = "Task" [ 868.944216] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.944789] env[61998]: DEBUG nova.network.neutron [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Successfully updated port: c8d6d30a-6ea6-4e25-b740-f859a1482020 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 868.957075] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]521756d9-bacd-3806-6a70-45c0afcb1b71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.987651] env[61998]: DEBUG nova.network.neutron [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Updated VIF entry in instance network info cache for port 2c555663-7a18-4eba-9038-f975654d0400. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 868.988029] env[61998]: DEBUG nova.network.neutron [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Updating instance_info_cache with network_info: [{"id": "2c555663-7a18-4eba-9038-f975654d0400", "address": "fa:16:3e:ea:aa:ac", "network": {"id": "017a5b7a-4001-4ccd-8656-94c62da1b694", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1740525260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f7b0f9307923448bbd7b245df28f97f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c555663-7a", "ovs_interfaceid": "2c555663-7a18-4eba-9038-f975654d0400", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.074833] env[61998]: DEBUG oslo_vmware.api [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388603, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207975} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.075090] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 869.075282] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 869.075649] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 869.075868] env[61998]: INFO nova.compute.manager [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Took 3.74 seconds to destroy the instance on the hypervisor. 
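The network_info payload logged twice above (once by _allocate_network_async between |[ ... ]| delimiters, once by update_instance_cache_with_nw_info as a plain [ ... ] list) is ordinary JSON once the surrounding log decoration is stripped, so the fields worth watching (port UUID, MAC, fixed IPs, NSX segmentation id) can be pulled out mechanically. A minimal sketch, assuming the logged list has been saved verbatim to network_info.json (the file name is an assumption; the field names come straight from the log):

import json

# Parse the VIF list exactly as Nova logged it.
with open('network_info.json') as f:
    vifs = json.load(f)

for vif in vifs:
    fixed_ips = [ip['address']
                 for subnet in vif['network']['subnets']
                 for ip in subnet['ips']]
    print(vif['id'],                           # port 2c555663-7a18-...
          vif['address'],                      # MAC fa:16:3e:ea:aa:ac
          fixed_ips,                           # ['192.168.128.12']
          vif['details']['segmentation_id'])   # NSX segmentation id 654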
[ 869.076134] env[61998]: DEBUG oslo.service.loopingcall [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.076332] env[61998]: DEBUG nova.compute.manager [-] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 869.076428] env[61998]: DEBUG nova.network.neutron [-] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 869.089996] env[61998]: INFO nova.compute.manager [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Took 26.28 seconds to build instance. [ 869.113090] env[61998]: DEBUG nova.compute.utils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 869.114867] env[61998]: DEBUG nova.compute.manager [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 869.114867] env[61998]: DEBUG nova.network.neutron [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 869.130840] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388606, 'name': CreateVM_Task, 'duration_secs': 0.303037} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.131018] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 869.131669] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.198380] env[61998]: DEBUG nova.policy [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8b17f109d724201a22264aa6ee02ca1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82b8854f80cf48628167fd6f678d7dd7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 869.254567] env[61998]: DEBUG oslo_vmware.api [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388607, 'name': PowerOffVM_Task, 'duration_secs': 0.198619} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.254836] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 869.255007] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 869.255271] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56990521-67d1-4acc-9e41-d77f79f8d995 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.321927] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 869.322171] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Deleting 
contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 869.322359] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Deleting the datastore file [datastore1] bcb05670-dc58-46be-a4a9-58a260e4132f {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 869.322659] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eab02397-439a-4231-b586-34f89da4ba94 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.332240] env[61998]: DEBUG oslo_vmware.api [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Waiting for the task: (returnval){ [ 869.332240] env[61998]: value = "task-1388609" [ 869.332240] env[61998]: _type = "Task" [ 869.332240] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.338675] env[61998]: DEBUG oslo_vmware.api [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388609, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.449861] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Acquiring lock "refresh_cache-1206c5c7-3eae-437b-9386-f3af937b8795" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.450066] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Acquired lock "refresh_cache-1206c5c7-3eae-437b-9386-f3af937b8795" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.450184] env[61998]: DEBUG nova.network.neutron [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.464908] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]521756d9-bacd-3806-6a70-45c0afcb1b71, 'name': SearchDatastore_Task, 'duration_secs': 0.010701} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.466025] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.466163] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.466421] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.466579] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.466776] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.467090] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.467406] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 869.467885] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7040bc57-aac7-4868-b043-e33f5193871b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.469925] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16e0fc80-0fe6-4109-9f6c-f535926fc931 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.475285] env[61998]: DEBUG oslo_vmware.api [None 
req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 869.475285] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5273c144-3610-cbe2-11f9-a93b5213e435" [ 869.475285] env[61998]: _type = "Task" [ 869.475285] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.485378] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5273c144-3610-cbe2-11f9-a93b5213e435, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.487968] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.488156] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.488886] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-687b2565-bedd-4591-a363-6900494e89d7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.491477] env[61998]: DEBUG oslo_concurrency.lockutils [req-6224528a-ffe3-4c56-a66f-b466c4ee2265 req-63422878-6d3b-49af-864f-c454ea7d5de8 service nova] Releasing lock "refresh_cache-0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.494676] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Waiting for the task: (returnval){ [ 869.494676] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]526bc0ac-3bb5-d115-39ab-0ffd64fb6096" [ 869.494676] env[61998]: _type = "Task" [ 869.494676] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.502672] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526bc0ac-3bb5-d115-39ab-0ffd64fb6096, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.571209] env[61998]: DEBUG nova.compute.manager [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Received event network-vif-plugged-c8d6d30a-6ea6-4e25-b740-f859a1482020 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 869.571427] env[61998]: DEBUG oslo_concurrency.lockutils [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] Acquiring lock "1206c5c7-3eae-437b-9386-f3af937b8795-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.571627] env[61998]: DEBUG oslo_concurrency.lockutils [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] Lock "1206c5c7-3eae-437b-9386-f3af937b8795-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.571788] env[61998]: DEBUG oslo_concurrency.lockutils [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] Lock "1206c5c7-3eae-437b-9386-f3af937b8795-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.571952] env[61998]: DEBUG nova.compute.manager [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] No waiting events found dispatching network-vif-plugged-c8d6d30a-6ea6-4e25-b740-f859a1482020 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 869.572180] env[61998]: WARNING nova.compute.manager [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Received unexpected event network-vif-plugged-c8d6d30a-6ea6-4e25-b740-f859a1482020 for instance with vm_state building and task_state spawning. [ 869.572363] env[61998]: DEBUG nova.compute.manager [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Received event network-changed-c8d6d30a-6ea6-4e25-b740-f859a1482020 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 869.572539] env[61998]: DEBUG nova.compute.manager [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Refreshing instance network info cache due to event network-changed-c8d6d30a-6ea6-4e25-b740-f859a1482020.
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 869.572641] env[61998]: DEBUG oslo_concurrency.lockutils [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] Acquiring lock "refresh_cache-1206c5c7-3eae-437b-9386-f3af937b8795" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.591953] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9822ae29-d05f-478a-8414-3d9e930a0b6e tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Lock "f3089d53-9c8f-4276-8e2e-0518cf29004b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 60.201s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.617063] env[61998]: DEBUG nova.compute.manager [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 869.621362] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5ec2e9ca-b93a-4bcb-9c59-db709f8a013e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.622179] env[61998]: DEBUG oslo_concurrency.lockutils [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.101s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.622450] env[61998]: DEBUG nova.objects.instance [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lazy-loading 'resources' on Instance uuid 5eb786f1-7789-48a0-a04e-a4039e387f58 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 869.699120] env[61998]: DEBUG nova.network.neutron [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Successfully created port: 089c550f-d232-4727-b576-df921335d3e4 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 869.839295] env[61998]: DEBUG oslo_vmware.api [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Task: {'id': task-1388609, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167613} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.839583] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 869.839796] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 869.839976] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 869.840160] env[61998]: INFO nova.compute.manager [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 869.840392] env[61998]: DEBUG oslo.service.loopingcall [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.840871] env[61998]: DEBUG nova.compute.manager [-] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 869.840969] env[61998]: DEBUG nova.network.neutron [-] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 869.989918] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5273c144-3610-cbe2-11f9-a93b5213e435, 'name': SearchDatastore_Task, 'duration_secs': 0.022555} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.990242] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.990483] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.990759] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.009344] env[61998]: DEBUG nova.network.neutron [-] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.009344] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526bc0ac-3bb5-d115-39ab-0ffd64fb6096, 'name': SearchDatastore_Task, 'duration_secs': 0.009788} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.010113] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f6562fd-d816-4560-989a-fb25c3333c45 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.015212] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Waiting for the task: (returnval){ [ 870.015212] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]529f7b2f-b3ce-cf47-3dc6-fea6871f604d" [ 870.015212] env[61998]: _type = "Task" [ 870.015212] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.024833] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529f7b2f-b3ce-cf47-3dc6-fea6871f604d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.026623] env[61998]: DEBUG nova.network.neutron [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.211044] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Acquiring lock "f3089d53-9c8f-4276-8e2e-0518cf29004b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.211044] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Lock "f3089d53-9c8f-4276-8e2e-0518cf29004b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.211235] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Acquiring lock "f3089d53-9c8f-4276-8e2e-0518cf29004b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.214732] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Lock "f3089d53-9c8f-4276-8e2e-0518cf29004b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.214732] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Lock "f3089d53-9c8f-4276-8e2e-0518cf29004b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.214732] env[61998]: INFO nova.compute.manager [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Terminating instance [ 870.215985] env[61998]: DEBUG nova.compute.manager [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Start destroying the instance on the hypervisor.
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 870.215985] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 870.216609] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db9e1b6-5b43-41f2-912f-51742b64e10b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.226369] env[61998]: DEBUG nova.network.neutron [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Updating instance_info_cache with network_info: [{"id": "c8d6d30a-6ea6-4e25-b740-f859a1482020", "address": "fa:16:3e:83:00:1e", "network": {"id": "e87bcfc9-ad36-42bf-bbb7-c0c3268c3ea6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-132472026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fefd01743e22471e81b557492839cf5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8d6d30a-6e", "ovs_interfaceid": "c8d6d30a-6ea6-4e25-b740-f859a1482020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.228278] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 870.228941] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4058973-e4e3-48ca-a6cc-06034165df27 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.235421] env[61998]: DEBUG oslo_vmware.api [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Waiting for the task: (returnval){ [ 870.235421] env[61998]: value = "task-1388610" [ 870.235421] env[61998]: _type = "Task" [ 870.235421] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.247023] env[61998]: DEBUG oslo_vmware.api [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388610, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.392377] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b696de95-ba0d-4f20-89df-1141d06d2522 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.399447] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75ec350-6ea6-4ab1-a07b-8ae370cc3126 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.430646] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b715c232-ae12-4a66-8278-4a57c01da50f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.438756] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0d8841-4fe1-41bf-b5a8-13d4eb3163d4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.452046] env[61998]: DEBUG nova.compute.provider_tree [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.513247] env[61998]: INFO nova.compute.manager [-] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Took 1.44 seconds to deallocate network for instance. [ 870.528128] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529f7b2f-b3ce-cf47-3dc6-fea6871f604d, 'name': SearchDatastore_Task, 'duration_secs': 0.010074} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.528857] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.528857] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 4ca7de74-3bcb-4da0-a2e1-573584467cc9/4ca7de74-3bcb-4da0-a2e1-573584467cc9.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 870.529014] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.529100] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.529306] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1854930-e462-46d1-b987-d0f9177e991d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.531321] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c269b805-041f-4f9d-bb38-808b7e3d0d93 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.539114] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Waiting for the task: (returnval){ [ 870.539114] env[61998]: value = "task-1388611" [ 870.539114] env[61998]: _type = "Task" [ 870.539114] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.543347] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.543538] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 870.544675] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af12517f-6222-46eb-8b65-092ce5aab1ac {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.550867] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388611, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.553914] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 870.553914] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5232c61d-44c2-8526-7d4c-9742a6fa5c3b" [ 870.553914] env[61998]: _type = "Task" [ 870.553914] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.561590] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5232c61d-44c2-8526-7d4c-9742a6fa5c3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.632640] env[61998]: DEBUG nova.compute.manager [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 870.658216] env[61998]: DEBUG nova.virt.hardware [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 870.658475] env[61998]: DEBUG nova.virt.hardware [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 870.658624] env[61998]: DEBUG nova.virt.hardware [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 870.658808] env[61998]: DEBUG nova.virt.hardware [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 870.658946] env[61998]: DEBUG nova.virt.hardware [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 870.659107] env[61998]: DEBUG nova.virt.hardware [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 870.659319] env[61998]: DEBUG nova.virt.hardware [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 870.659479] env[61998]: DEBUG nova.virt.hardware [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 870.659646] env[61998]: DEBUG nova.virt.hardware [None 
req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 870.659873] env[61998]: DEBUG nova.virt.hardware [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 870.660083] env[61998]: DEBUG nova.virt.hardware [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 870.661107] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120506e7-5758-485a-8d97-4b8cccc082db {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.669210] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825cca7a-e0fe-4945-a56e-9102e5dcdc23 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.693129] env[61998]: DEBUG nova.network.neutron [-] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.729965] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Releasing lock "refresh_cache-1206c5c7-3eae-437b-9386-f3af937b8795" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.730326] env[61998]: DEBUG nova.compute.manager [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Instance network_info: |[{"id": "c8d6d30a-6ea6-4e25-b740-f859a1482020", "address": "fa:16:3e:83:00:1e", "network": {"id": "e87bcfc9-ad36-42bf-bbb7-c0c3268c3ea6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-132472026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fefd01743e22471e81b557492839cf5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8d6d30a-6e", "ovs_interfaceid": "c8d6d30a-6ea6-4e25-b740-f859a1482020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 870.730638] env[61998]: DEBUG oslo_concurrency.lockutils [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] Acquired lock "refresh_cache-1206c5c7-3eae-437b-9386-f3af937b8795" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.730827] env[61998]: DEBUG nova.network.neutron [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Refreshing network info cache for port c8d6d30a-6ea6-4e25-b740-f859a1482020 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.732077] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:00:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8d6d30a-6ea6-4e25-b740-f859a1482020', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 870.742014] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Creating folder: Project (fefd01743e22471e81b557492839cf5c). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.746099] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a6047c3-1e12-4f70-b4b0-a932cc2ace68 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.758698] env[61998]: DEBUG oslo_vmware.api [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388610, 'name': PowerOffVM_Task, 'duration_secs': 0.180646} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.759026] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 870.759202] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 870.759469] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9fe1de27-28e7-4069-a7f4-edb4ca9b27c4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.762454] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Created folder: Project (fefd01743e22471e81b557492839cf5c) in parent group-v294665. [ 870.762657] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Creating folder: Instances. Parent ref: group-v294734. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.763317] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fc13ab6-29e1-4fde-82dd-9ef7ed6d03dd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.773114] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Created folder: Instances in parent group-v294734. [ 870.773425] env[61998]: DEBUG oslo.service.loopingcall [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 870.773662] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 870.773933] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7def0d9b-e805-4c89-8ca0-71bedd7a0728 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.800846] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 870.800846] env[61998]: value = "task-1388615" [ 870.800846] env[61998]: _type = "Task" [ 870.800846] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.809721] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388615, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.838872] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 870.839112] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 870.839299] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Deleting the datastore file [datastore2] f3089d53-9c8f-4276-8e2e-0518cf29004b {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.839579] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73f927de-7b46-49c6-a8c7-6e16952ee8de {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.846377] env[61998]: DEBUG oslo_vmware.api [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Waiting for the task: (returnval){ [ 870.846377] env[61998]: value = "task-1388616" [ 870.846377] env[61998]: _type = "Task" [ 870.846377] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.857221] env[61998]: DEBUG oslo_vmware.api [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388616, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.955455] env[61998]: DEBUG nova.scheduler.client.report [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 871.003911] env[61998]: DEBUG nova.network.neutron [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Updated VIF entry in instance network info cache for port c8d6d30a-6ea6-4e25-b740-f859a1482020. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 871.004358] env[61998]: DEBUG nova.network.neutron [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Updating instance_info_cache with network_info: [{"id": "c8d6d30a-6ea6-4e25-b740-f859a1482020", "address": "fa:16:3e:83:00:1e", "network": {"id": "e87bcfc9-ad36-42bf-bbb7-c0c3268c3ea6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-132472026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fefd01743e22471e81b557492839cf5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8d6d30a-6e", "ovs_interfaceid": "c8d6d30a-6ea6-4e25-b740-f859a1482020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.023354] env[61998]: DEBUG oslo_concurrency.lockutils [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.048548] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388611, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474597} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.048807] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 4ca7de74-3bcb-4da0-a2e1-573584467cc9/4ca7de74-3bcb-4da0-a2e1-573584467cc9.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 871.049074] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.049341] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44d46897-ff0d-40d9-bb75-948cc83b3641 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.058243] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Waiting for the task: (returnval){ [ 871.058243] env[61998]: value = "task-1388617" [ 871.058243] env[61998]: _type = "Task" [ 871.058243] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.065387] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5232c61d-44c2-8526-7d4c-9742a6fa5c3b, 'name': SearchDatastore_Task, 'duration_secs': 0.008274} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.066121] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c233bb9e-5e5e-4e51-adb4-1a36f84e064b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.070588] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388617, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.073122] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 871.073122] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52e1e256-1a17-3fab-86f0-b8ce4ee48bad" [ 871.073122] env[61998]: _type = "Task" [ 871.073122] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.080192] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52e1e256-1a17-3fab-86f0-b8ce4ee48bad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.195609] env[61998]: INFO nova.compute.manager [-] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Took 1.35 seconds to deallocate network for instance. [ 871.310623] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388615, 'name': CreateVM_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.356603] env[61998]: DEBUG oslo_vmware.api [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Task: {'id': task-1388616, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288733} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.357070] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 871.357440] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 871.357779] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 871.358117] env[61998]: INFO nova.compute.manager [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 871.358504] env[61998]: DEBUG oslo.service.loopingcall [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.358849] env[61998]: DEBUG nova.compute.manager [-] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 871.359109] env[61998]: DEBUG nova.network.neutron [-] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 871.461529] env[61998]: DEBUG oslo_concurrency.lockutils [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.839s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.463830] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.329s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.465370] env[61998]: INFO nova.compute.claims [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 871.476946] env[61998]: DEBUG nova.network.neutron [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Successfully updated port: 089c550f-d232-4727-b576-df921335d3e4 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 871.499391] env[61998]: INFO nova.scheduler.client.report [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Deleted allocations for instance 5eb786f1-7789-48a0-a04e-a4039e387f58 [ 871.507638] env[61998]: DEBUG oslo_concurrency.lockutils [req-14fa0c23-4026-4fbe-84ea-e320e6ab2c53 req-ecbd4861-89be-4415-897d-776e8316a06a service nova] Releasing lock "refresh_cache-1206c5c7-3eae-437b-9386-f3af937b8795" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.572241] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388617, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067213} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.572770] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.576020] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d035ce23-d299-40d0-928e-e12ea4216839 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.605076] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 4ca7de74-3bcb-4da0-a2e1-573584467cc9/4ca7de74-3bcb-4da0-a2e1-573584467cc9.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.612866] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a926fcf7-025e-4d58-ae99-33b9129ab43a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.629281] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52e1e256-1a17-3fab-86f0-b8ce4ee48bad, 'name': SearchDatastore_Task, 'duration_secs': 0.008517} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.631799] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.632068] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e/0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 871.633713] env[61998]: DEBUG nova.compute.manager [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Received event network-vif-deleted-e3958acf-f252-41b8-84ce-2b216db5b0ff {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 871.633908] env[61998]: DEBUG nova.compute.manager [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Received event network-vif-deleted-294bd0fe-945d-4198-8a3c-13e489ae1134 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 871.634086] env[61998]: DEBUG nova.compute.manager [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Received event network-vif-plugged-089c550f-d232-4727-b576-df921335d3e4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 871.634272] env[61998]: DEBUG oslo_concurrency.lockutils [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] Acquiring lock "4c15a9f8-4dc2-48e1-a697-03298adb8527-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.634590] env[61998]: DEBUG oslo_concurrency.lockutils [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] Lock "4c15a9f8-4dc2-48e1-a697-03298adb8527-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.634723] env[61998]: DEBUG oslo_concurrency.lockutils [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] Lock "4c15a9f8-4dc2-48e1-a697-03298adb8527-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.634861] env[61998]: DEBUG nova.compute.manager [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] No 
waiting events found dispatching network-vif-plugged-089c550f-d232-4727-b576-df921335d3e4 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 871.635038] env[61998]: WARNING nova.compute.manager [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Received unexpected event network-vif-plugged-089c550f-d232-4727-b576-df921335d3e4 for instance with vm_state building and task_state spawning. [ 871.635204] env[61998]: DEBUG nova.compute.manager [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Received event network-changed-089c550f-d232-4727-b576-df921335d3e4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 871.635355] env[61998]: DEBUG nova.compute.manager [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Refreshing instance network info cache due to event network-changed-089c550f-d232-4727-b576-df921335d3e4. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 871.635643] env[61998]: DEBUG oslo_concurrency.lockutils [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] Acquiring lock "refresh_cache-4c15a9f8-4dc2-48e1-a697-03298adb8527" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.635932] env[61998]: DEBUG oslo_concurrency.lockutils [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] Acquired lock "refresh_cache-4c15a9f8-4dc2-48e1-a697-03298adb8527" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.636151] env[61998]: DEBUG nova.network.neutron [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Refreshing network info cache for port 089c550f-d232-4727-b576-df921335d3e4 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 871.637774] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-101b4f69-b7f4-4656-b7f0-09d9b5b26a1c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.643175] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Waiting for the task: (returnval){ [ 871.643175] env[61998]: value = "task-1388618" [ 871.643175] env[61998]: _type = "Task" [ 871.643175] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.645145] env[61998]: DEBUG nova.compute.manager [req-c8fb356d-7811-4f54-b858-10974c0e780d req-3d89a63d-933b-4ab2-a6e7-30ca75326713 service nova] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Received event network-vif-deleted-381f5c1d-c282-43f5-a1b4-7ef6b8c559bc {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 871.645326] env[61998]: INFO nova.compute.manager [req-c8fb356d-7811-4f54-b858-10974c0e780d req-3d89a63d-933b-4ab2-a6e7-30ca75326713 service nova] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Neutron deleted interface 381f5c1d-c282-43f5-a1b4-7ef6b8c559bc; detaching it from the instance and deleting it from the info cache [ 871.645496] env[61998]: DEBUG nova.network.neutron [req-c8fb356d-7811-4f54-b858-10974c0e780d req-3d89a63d-933b-4ab2-a6e7-30ca75326713 service nova] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.651382] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 871.651382] env[61998]: value = "task-1388619" [ 871.651382] env[61998]: _type = "Task" [ 871.651382] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.661491] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388618, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.667098] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388619, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.702169] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.812275] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388615, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.982140] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "refresh_cache-4c15a9f8-4dc2-48e1-a697-03298adb8527" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.008510] env[61998]: DEBUG oslo_concurrency.lockutils [None req-76ee5c1e-ebff-4c31-8d17-5bf567258f24 tempest-ServerShowV247Test-803251391 tempest-ServerShowV247Test-803251391-project-member] Lock "5eb786f1-7789-48a0-a04e-a4039e387f58" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 23.212s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.138655] env[61998]: DEBUG nova.network.neutron [-] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.154101] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388618, 'name': ReconfigVM_Task, 'duration_secs': 0.360741} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.154312] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-252c5815-eb0f-42b3-8945-8a1ba632a636 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.158779] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 4ca7de74-3bcb-4da0-a2e1-573584467cc9/4ca7de74-3bcb-4da0-a2e1-573584467cc9.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.159598] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a244eb0-8943-4b25-83fc-d8b3a40a7b96 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.166312] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388619, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476561} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.168133] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e/0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 872.168349] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 872.168643] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Waiting for the task: (returnval){ [ 872.168643] env[61998]: value = "task-1388620" [ 872.168643] env[61998]: _type = "Task" [ 872.168643] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.169131] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-616dbb60-5d67-4f93-9513-b56f68b67b00 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.173404] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c819c7-fb30-49df-827b-e44367df6dc1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.184017] env[61998]: DEBUG nova.network.neutron [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 872.194854] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388620, 'name': Rename_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.195974] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 872.195974] env[61998]: value = "task-1388621" [ 872.195974] env[61998]: _type = "Task" [ 872.195974] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.207682] env[61998]: DEBUG nova.compute.manager [req-c8fb356d-7811-4f54-b858-10974c0e780d req-3d89a63d-933b-4ab2-a6e7-30ca75326713 service nova] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Detach interface failed, port_id=381f5c1d-c282-43f5-a1b4-7ef6b8c559bc, reason: Instance f3089d53-9c8f-4276-8e2e-0518cf29004b could not be found. 
{{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 872.213138] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388621, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.312828] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388615, 'name': CreateVM_Task, 'duration_secs': 1.078964} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.313009] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 872.313710] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.313888] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.314229] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 872.314509] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63d6205f-bb5b-4f9c-b9d6-5fced847e8a7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.318805] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Waiting for the task: (returnval){ [ 872.318805] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]528c4fcd-6eb9-98c0-3239-f2a4a128a4e1" [ 872.318805] env[61998]: _type = "Task" [ 872.318805] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.326872] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528c4fcd-6eb9-98c0-3239-f2a4a128a4e1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.451856] env[61998]: DEBUG nova.network.neutron [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.641657] env[61998]: INFO nova.compute.manager [-] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Took 1.28 seconds to deallocate network for instance. [ 872.684148] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388620, 'name': Rename_Task, 'duration_secs': 0.146458} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.684491] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.684730] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ecfe5d1-1a48-494a-af34-4a80cd913cb3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.690792] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Waiting for the task: (returnval){ [ 872.690792] env[61998]: value = "task-1388622" [ 872.690792] env[61998]: _type = "Task" [ 872.690792] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.704417] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388622, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.710257] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388621, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.170201} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.710693] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 872.712038] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76785fa9-6508-490b-864e-e8aaa278ab3d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.734643] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e/0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 872.735708] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2f05656-5428-4cc5-ba1a-71f0f5d8bbfa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.750993] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e10c374-b936-4f9e-9f1b-a2d5c963750c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.759155] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586ada80-e3ff-49b9-9f81-2c8bd5bad4e3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.762798] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 872.762798] env[61998]: value = "task-1388623" [ 872.762798] env[61998]: _type = "Task" [ 872.762798] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.797287] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4715c1ca-ec9b-4664-8729-6630ba189035 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.799450] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388623, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.804749] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f1d2b8-dfc5-414b-8518-372fda712dd6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.820550] env[61998]: DEBUG nova.compute.provider_tree [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.828741] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528c4fcd-6eb9-98c0-3239-f2a4a128a4e1, 'name': SearchDatastore_Task, 'duration_secs': 0.009954} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.829571] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.829788] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.830025] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.830157] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.831286] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.831286] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b52578c2-2599-47dd-9537-bd8121c65cca {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.838869] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.839071] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 872.839767] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3dfab915-5b0d-40c0-9fc8-ebcc1d84a38c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.844851] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Waiting for the task: (returnval){ [ 872.844851] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52389d12-3662-8914-83b1-5a6f4c6b6b22" [ 872.844851] env[61998]: _type = "Task" [ 872.844851] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.852353] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52389d12-3662-8914-83b1-5a6f4c6b6b22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.954772] env[61998]: DEBUG oslo_concurrency.lockutils [req-32a72f87-af4f-425f-8ac1-fea4c05a83f7 req-a133cb82-ada3-4c8c-866c-0b07315dcb71 service nova] Releasing lock "refresh_cache-4c15a9f8-4dc2-48e1-a697-03298adb8527" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.956396] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "refresh_cache-4c15a9f8-4dc2-48e1-a697-03298adb8527" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.956396] env[61998]: DEBUG nova.network.neutron [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 873.153917] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.202641] env[61998]: DEBUG oslo_vmware.api [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': 
task-1388622, 'name': PowerOnVM_Task, 'duration_secs': 0.469865} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.202901] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.203115] env[61998]: INFO nova.compute.manager [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Took 9.60 seconds to spawn the instance on the hypervisor. [ 873.203299] env[61998]: DEBUG nova.compute.manager [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 873.204059] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa330894-f8c0-404f-89e8-d6e465444033 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.274071] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388623, 'name': ReconfigVM_Task, 'duration_secs': 0.365257} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.274071] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e/0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 873.274654] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52cb6b81-6e33-443d-b7ce-bc4e8af3e81a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.281159] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 873.281159] env[61998]: value = "task-1388624" [ 873.281159] env[61998]: _type = "Task" [ 873.281159] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.289654] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388624, 'name': Rename_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.325343] env[61998]: DEBUG nova.scheduler.client.report [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 873.354915] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52389d12-3662-8914-83b1-5a6f4c6b6b22, 'name': SearchDatastore_Task, 'duration_secs': 0.00838} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.355786] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11d8e643-fd21-4846-98f5-f505b1b04937 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.361173] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Waiting for the task: (returnval){ [ 873.361173] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52042d83-d1a7-bb5e-b169-72fe82bbcdd7" [ 873.361173] env[61998]: _type = "Task" [ 873.361173] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.368579] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52042d83-d1a7-bb5e-b169-72fe82bbcdd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.486017] env[61998]: DEBUG nova.network.neutron [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 873.687421] env[61998]: DEBUG nova.network.neutron [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Updating instance_info_cache with network_info: [{"id": "089c550f-d232-4727-b576-df921335d3e4", "address": "fa:16:3e:93:ad:b8", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap089c550f-d2", "ovs_interfaceid": "089c550f-d232-4727-b576-df921335d3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.722968] env[61998]: INFO nova.compute.manager [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Took 30.65 seconds to build instance. [ 873.791260] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388624, 'name': Rename_Task, 'duration_secs': 0.375208} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.791582] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 873.791975] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a54918e0-c3c4-4cc6-a958-cd73df2a5c8e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.799440] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 873.799440] env[61998]: value = "task-1388625" [ 873.799440] env[61998]: _type = "Task" [ 873.799440] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.807616] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388625, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.830802] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.367s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.831395] env[61998]: DEBUG nova.compute.manager [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 873.834366] env[61998]: DEBUG oslo_concurrency.lockutils [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.120s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.834686] env[61998]: DEBUG nova.objects.instance [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lazy-loading 'resources' on Instance uuid b3a3bb81-843b-4227-bebf-a8079f98c0f8 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.873941] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52042d83-d1a7-bb5e-b169-72fe82bbcdd7, 'name': SearchDatastore_Task, 'duration_secs': 0.009395} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.874432] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.874832] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 1206c5c7-3eae-437b-9386-f3af937b8795/1206c5c7-3eae-437b-9386-f3af937b8795.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 873.875129] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74e94a03-c386-4db5-9256-4c617ff4ae03 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.882065] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Waiting for the task: (returnval){ [ 873.882065] env[61998]: value = "task-1388626" [ 873.882065] env[61998]: _type = "Task" [ 873.882065] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.891259] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388626, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.190210] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "refresh_cache-4c15a9f8-4dc2-48e1-a697-03298adb8527" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.190578] env[61998]: DEBUG nova.compute.manager [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Instance network_info: |[{"id": "089c550f-d232-4727-b576-df921335d3e4", "address": "fa:16:3e:93:ad:b8", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap089c550f-d2", "ovs_interfaceid": "089c550f-d232-4727-b576-df921335d3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 874.191032] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:ad:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bec903a9-d773-4d7c-a80c-c2533be346fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '089c550f-d232-4727-b576-df921335d3e4', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 874.199280] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Creating folder: Project (82b8854f80cf48628167fd6f678d7dd7). Parent ref: group-v294665. 
{{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 874.199588] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d4a019f-2c36-4860-9db7-304032404b1d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.211494] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Created folder: Project (82b8854f80cf48628167fd6f678d7dd7) in parent group-v294665. [ 874.211673] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Creating folder: Instances. Parent ref: group-v294737. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 874.211930] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ff041b5-79da-476f-bb1d-5a2f9fc59dc9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.220917] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Created folder: Instances in parent group-v294737. [ 874.221193] env[61998]: DEBUG oslo.service.loopingcall [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 874.221399] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 874.221612] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ba35aae-66f5-4eaa-a1a6-608fc77ace52 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.237104] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f8e7a3de-979c-40ab-87d1-0ec576943ce1 tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Lock "4ca7de74-3bcb-4da0-a2e1-573584467cc9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.159s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.242240] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 874.242240] env[61998]: value = "task-1388629" [ 874.242240] env[61998]: _type = "Task" [ 874.242240] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.251185] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388629, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.311052] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388625, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.337782] env[61998]: DEBUG nova.compute.utils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 874.339309] env[61998]: DEBUG nova.compute.manager [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 874.339492] env[61998]: DEBUG nova.network.neutron [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 874.393755] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388626, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48278} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.398474] env[61998]: DEBUG nova.policy [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '040b68bdb3d641569a1e34e71642721d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e3f7b7c926e4f778c23d04ad9598eb5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 874.400130] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 1206c5c7-3eae-437b-9386-f3af937b8795/1206c5c7-3eae-437b-9386-f3af937b8795.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 874.401061] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 874.401153] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-efa0c29e-4931-499c-949b-0b3225bef611 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.408470] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Waiting for the task: (returnval){ [ 874.408470] env[61998]: value = "task-1388630" [ 874.408470] env[61998]: _type = "Task" [ 874.408470] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.422607] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388630, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.428390] env[61998]: DEBUG nova.compute.manager [req-0c271aee-8c74-4bfa-b232-d73285694d4c req-0c581c9e-916f-4afb-838f-15729b9ff045 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Received event network-changed-a1959129-1e34-4499-b312-c6580996cd63 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 874.428612] env[61998]: DEBUG nova.compute.manager [req-0c271aee-8c74-4bfa-b232-d73285694d4c req-0c581c9e-916f-4afb-838f-15729b9ff045 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Refreshing instance network info cache due to event network-changed-a1959129-1e34-4499-b312-c6580996cd63. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 874.428826] env[61998]: DEBUG oslo_concurrency.lockutils [req-0c271aee-8c74-4bfa-b232-d73285694d4c req-0c581c9e-916f-4afb-838f-15729b9ff045 service nova] Acquiring lock "refresh_cache-4ca7de74-3bcb-4da0-a2e1-573584467cc9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.428963] env[61998]: DEBUG oslo_concurrency.lockutils [req-0c271aee-8c74-4bfa-b232-d73285694d4c req-0c581c9e-916f-4afb-838f-15729b9ff045 service nova] Acquired lock "refresh_cache-4ca7de74-3bcb-4da0-a2e1-573584467cc9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.429180] env[61998]: DEBUG nova.network.neutron [req-0c271aee-8c74-4bfa-b232-d73285694d4c req-0c581c9e-916f-4afb-838f-15729b9ff045 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Refreshing network info cache for port a1959129-1e34-4499-b312-c6580996cd63 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 874.626730] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e0d190-9d20-46af-b0c7-3fa4ec68a77a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.634770] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea065f0-b827-4ff3-acfb-ab0bbe279d06 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.669105] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43360151-e160-4694-8061-ed31b8603f76 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.677229] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2258ee-7f9f-4d3b-b5fb-0eb73924a3da {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.691164] env[61998]: DEBUG nova.compute.provider_tree [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.747968] env[61998]: DEBUG nova.network.neutron [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Successfully created port: c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 874.757894] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388629, 'name': CreateVM_Task, 'duration_secs': 0.389199} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.757894] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 874.758604] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.758768] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.759107] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 874.759363] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b52eba4d-b657-4172-bada-afd79ab882d0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.764264] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 874.764264] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52b37609-9b39-2e64-7caf-81f0b067225c" [ 874.764264] env[61998]: _type = "Task" [ 874.764264] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.776841] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b37609-9b39-2e64-7caf-81f0b067225c, 'name': SearchDatastore_Task, 'duration_secs': 0.009287} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.777175] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.777435] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 874.777691] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.777860] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.778079] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 874.778481] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d05752ed-6b8a-49ec-9bb7-bb7eecfa939a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.794696] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 874.794894] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 874.795764] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-426a0519-0304-482d-a4c9-435554b4a16e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.801283] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 874.801283] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]525a767b-198c-5996-d941-11904b7e7057" [ 874.801283] env[61998]: _type = "Task" [ 874.801283] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.812921] env[61998]: DEBUG oslo_vmware.api [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388625, 'name': PowerOnVM_Task, 'duration_secs': 0.845974} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.818021] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 874.818021] env[61998]: INFO nova.compute.manager [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Took 8.85 seconds to spawn the instance on the hypervisor. [ 874.818021] env[61998]: DEBUG nova.compute.manager [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 874.818021] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]525a767b-198c-5996-d941-11904b7e7057, 'name': SearchDatastore_Task, 'duration_secs': 0.008509} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.818219] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc9910f-74e3-49cd-a748-e5d0604222d4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.821820] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2ab5939-aff6-436e-bde9-03c793feb8dd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.830848] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 874.830848] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52170d6f-138d-969c-7a7e-93ef212c2f3a" [ 874.830848] env[61998]: _type = "Task" [ 874.830848] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.838996] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52170d6f-138d-969c-7a7e-93ef212c2f3a, 'name': SearchDatastore_Task, 'duration_secs': 0.007978} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.839235] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.839478] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 4c15a9f8-4dc2-48e1-a697-03298adb8527/4c15a9f8-4dc2-48e1-a697-03298adb8527.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 874.839704] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0dfee3ff-23c9-42fe-bf01-05f6d20a3740 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.844361] env[61998]: DEBUG nova.compute.manager [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 874.848169] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 874.848169] env[61998]: value = "task-1388631" [ 874.848169] env[61998]: _type = "Task" [ 874.848169] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.856040] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388631, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.917964] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388630, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074518} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.918256] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 874.919020] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02916c56-28d3-46f1-aa1c-ed3c7ed2abf0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.945795] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] 1206c5c7-3eae-437b-9386-f3af937b8795/1206c5c7-3eae-437b-9386-f3af937b8795.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 874.948220] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55402c33-bc68-48ef-a410-69e6522daa1f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.968186] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Waiting for the task: (returnval){ [ 874.968186] env[61998]: value = "task-1388632" [ 874.968186] env[61998]: _type = "Task" [ 874.968186] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.977925] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388632, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.194867] env[61998]: DEBUG nova.scheduler.client.report [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 875.321134] env[61998]: DEBUG nova.network.neutron [req-0c271aee-8c74-4bfa-b232-d73285694d4c req-0c581c9e-916f-4afb-838f-15729b9ff045 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Updated VIF entry in instance network info cache for port a1959129-1e34-4499-b312-c6580996cd63. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 875.321453] env[61998]: DEBUG nova.network.neutron [req-0c271aee-8c74-4bfa-b232-d73285694d4c req-0c581c9e-916f-4afb-838f-15729b9ff045 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Updating instance_info_cache with network_info: [{"id": "a1959129-1e34-4499-b312-c6580996cd63", "address": "fa:16:3e:82:ed:f8", "network": {"id": "2d9ba45d-5e99-4c38-80b2-6b9ca58acf95", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1139375101-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2b30526cfa64ab4802e1385aeaf9103", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1959129-1e", "ovs_interfaceid": "a1959129-1e34-4499-b312-c6580996cd63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.341818] env[61998]: INFO nova.compute.manager [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Took 31.37 seconds to build instance. [ 875.361902] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388631, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488627} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.362460] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 4c15a9f8-4dc2-48e1-a697-03298adb8527/4c15a9f8-4dc2-48e1-a697-03298adb8527.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 875.362460] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 875.363899] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45602d35-2c9b-48ae-a66c-363f7a6f1a41 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.368655] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 875.368655] env[61998]: value = "task-1388633" [ 875.368655] env[61998]: _type = "Task" [ 875.368655] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.376870] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388633, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.477919] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388632, 'name': ReconfigVM_Task, 'duration_secs': 0.352896} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.478228] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Reconfigured VM instance instance-0000004d to attach disk [datastore2] 1206c5c7-3eae-437b-9386-f3af937b8795/1206c5c7-3eae-437b-9386-f3af937b8795.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 875.478871] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df69dac3-6946-4847-b8a5-6631306d6ddd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.484632] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Waiting for the task: (returnval){ [ 875.484632] env[61998]: value = "task-1388634" [ 875.484632] env[61998]: _type = "Task" [ 875.484632] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.492294] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388634, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.662575] env[61998]: DEBUG nova.compute.manager [req-a1729564-f858-4c98-b11b-e34db88d4414 req-3f579749-fc8b-4d92-922e-c8f6ada62c65 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Received event network-changed-2c555663-7a18-4eba-9038-f975654d0400 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 875.663104] env[61998]: DEBUG nova.compute.manager [req-a1729564-f858-4c98-b11b-e34db88d4414 req-3f579749-fc8b-4d92-922e-c8f6ada62c65 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Refreshing instance network info cache due to event network-changed-2c555663-7a18-4eba-9038-f975654d0400. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 875.663336] env[61998]: DEBUG oslo_concurrency.lockutils [req-a1729564-f858-4c98-b11b-e34db88d4414 req-3f579749-fc8b-4d92-922e-c8f6ada62c65 service nova] Acquiring lock "refresh_cache-0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.663481] env[61998]: DEBUG oslo_concurrency.lockutils [req-a1729564-f858-4c98-b11b-e34db88d4414 req-3f579749-fc8b-4d92-922e-c8f6ada62c65 service nova] Acquired lock "refresh_cache-0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.663641] env[61998]: DEBUG nova.network.neutron [req-a1729564-f858-4c98-b11b-e34db88d4414 req-3f579749-fc8b-4d92-922e-c8f6ada62c65 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Refreshing network info cache for port 2c555663-7a18-4eba-9038-f975654d0400 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 875.702136] env[61998]: DEBUG oslo_concurrency.lockutils [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.868s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.704460] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.904s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.704721] env[61998]: DEBUG nova.objects.instance [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lazy-loading 'resources' on Instance uuid c84d15dc-0ef2-44e2-b579-104678a6bb07 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 875.727969] env[61998]: INFO nova.scheduler.client.report [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted allocations for instance b3a3bb81-843b-4227-bebf-a8079f98c0f8 [ 875.824036] env[61998]: DEBUG oslo_concurrency.lockutils [req-0c271aee-8c74-4bfa-b232-d73285694d4c req-0c581c9e-916f-4afb-838f-15729b9ff045 service nova] Releasing lock "refresh_cache-4ca7de74-3bcb-4da0-a2e1-573584467cc9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.844186] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c73fe2d9-eea6-4ed2-9ef8-83756def492c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.248s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.857537] env[61998]: DEBUG nova.compute.manager [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 
tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 875.879822] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388633, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060339} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.880299] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.880858] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454a9d81-b360-455a-944d-661fe2db86ae {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.886091] env[61998]: DEBUG nova.virt.hardware [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 875.886315] env[61998]: DEBUG nova.virt.hardware [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 875.886472] env[61998]: DEBUG nova.virt.hardware [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 875.886652] env[61998]: DEBUG nova.virt.hardware [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 875.886800] env[61998]: DEBUG nova.virt.hardware [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Image pref 
0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 875.886972] env[61998]: DEBUG nova.virt.hardware [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 875.887879] env[61998]: DEBUG nova.virt.hardware [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 875.887879] env[61998]: DEBUG nova.virt.hardware [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 875.887879] env[61998]: DEBUG nova.virt.hardware [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 875.887879] env[61998]: DEBUG nova.virt.hardware [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 875.887879] env[61998]: DEBUG nova.virt.hardware [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 875.888647] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aec6927-383d-4b83-a182-2c68c935cf46 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.910307] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 4c15a9f8-4dc2-48e1-a697-03298adb8527/4c15a9f8-4dc2-48e1-a697-03298adb8527.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.911229] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-981c7301-13fc-4ca0-9c7c-acfb7eff51af {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.929331] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aad9d1d-7fe2-4f0e-9dc9-9bb99acc2bfe {{(pid=61998) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.934940] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 875.934940] env[61998]: value = "task-1388635" [ 875.934940] env[61998]: _type = "Task" [ 875.934940] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.952608] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388635, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.994882] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388634, 'name': Rename_Task, 'duration_secs': 0.134431} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.995198] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 875.995481] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e007f70c-aef8-4c53-b448-a5e75f764224 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.001845] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Waiting for the task: (returnval){ [ 876.001845] env[61998]: value = "task-1388636" [ 876.001845] env[61998]: _type = "Task" [ 876.001845] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.009521] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388636, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.235924] env[61998]: DEBUG oslo_concurrency.lockutils [None req-be7230ec-c09a-4b58-a583-c2fc0e22338d tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "b3a3bb81-843b-4227-bebf-a8079f98c0f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.737s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.333111] env[61998]: DEBUG nova.network.neutron [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Successfully updated port: c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 876.447895] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388635, 'name': ReconfigVM_Task, 'duration_secs': 0.260761} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.450549] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 4c15a9f8-4dc2-48e1-a697-03298adb8527/4c15a9f8-4dc2-48e1-a697-03298adb8527.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.451445] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d31c590-8132-4664-b3b8-0284b3b0cdbb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.459243] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 876.459243] env[61998]: value = "task-1388637" [ 876.459243] env[61998]: _type = "Task" [ 876.459243] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.472408] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388637, 'name': Rename_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.473703] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb8b83f-92fa-4f1c-8d83-3440c6512cad {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.483299] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c772f3-d48a-447b-820e-a4ac99ebd69f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.517200] env[61998]: DEBUG nova.network.neutron [req-a1729564-f858-4c98-b11b-e34db88d4414 req-3f579749-fc8b-4d92-922e-c8f6ada62c65 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Updated VIF entry in instance network info cache for port 2c555663-7a18-4eba-9038-f975654d0400. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 876.517600] env[61998]: DEBUG nova.network.neutron [req-a1729564-f858-4c98-b11b-e34db88d4414 req-3f579749-fc8b-4d92-922e-c8f6ada62c65 service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Updating instance_info_cache with network_info: [{"id": "2c555663-7a18-4eba-9038-f975654d0400", "address": "fa:16:3e:ea:aa:ac", "network": {"id": "017a5b7a-4001-4ccd-8656-94c62da1b694", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1740525260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f7b0f9307923448bbd7b245df28f97f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c555663-7a", "ovs_interfaceid": "2c555663-7a18-4eba-9038-f975654d0400", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.522200] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0026130-080d-486c-b786-203052e16c22 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.531985] env[61998]: DEBUG oslo_vmware.api [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388636, 'name': PowerOnVM_Task, 'duration_secs': 0.485019} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.533920] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033e3b8e-2498-46ca-80cf-fa6c3143f37a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.537828] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 876.538087] env[61998]: INFO nova.compute.manager [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Took 8.25 seconds to spawn the instance on the hypervisor. [ 876.538275] env[61998]: DEBUG nova.compute.manager [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 876.539253] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cae8642-80fb-48ca-9fb1-bfcea370f8e9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.552052] env[61998]: DEBUG nova.compute.provider_tree [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.834244] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquiring lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.834509] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquired lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.834618] env[61998]: DEBUG nova.network.neutron [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 876.971441] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388637, 'name': Rename_Task, 'duration_secs': 0.137364} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.971697] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 876.971935] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-166514c2-1f9c-4088-b6d3-06da384b2bde {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.977836] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 876.977836] env[61998]: value = "task-1388638" [ 876.977836] env[61998]: _type = "Task" [ 876.977836] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.985231] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388638, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.026099] env[61998]: DEBUG oslo_concurrency.lockutils [req-a1729564-f858-4c98-b11b-e34db88d4414 req-3f579749-fc8b-4d92-922e-c8f6ada62c65 service nova] Releasing lock "refresh_cache-0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.058463] env[61998]: DEBUG nova.scheduler.client.report [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 877.071151] env[61998]: INFO nova.compute.manager [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Took 32.16 seconds to build instance. [ 877.371687] env[61998]: DEBUG nova.network.neutron [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.489110] env[61998]: DEBUG oslo_vmware.api [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388638, 'name': PowerOnVM_Task, 'duration_secs': 0.446842} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.489386] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.489588] env[61998]: INFO nova.compute.manager [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Took 6.86 seconds to spawn the instance on the hypervisor. [ 877.489768] env[61998]: DEBUG nova.compute.manager [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 877.490557] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa26761-cdeb-4992-8632-b47a27c1a21b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.526522] env[61998]: DEBUG nova.network.neutron [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updating instance_info_cache with network_info: [{"id": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "address": "fa:16:3e:05:38:e1", "network": {"id": "46300d6e-80f7-49cb-963b-7c569251b4b2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-784470546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2e3f7b7c926e4f778c23d04ad9598eb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0165176-8b", "ovs_interfaceid": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.566221] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.862s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.568488] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.745s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.570370] env[61998]: INFO nova.compute.claims [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 877.573040] env[61998]: DEBUG oslo_concurrency.lockutils [None req-30956bf8-4724-4ef3-acf6-cd544bbc562b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Lock "1206c5c7-3eae-437b-9386-f3af937b8795" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.845s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.588763] env[61998]: INFO nova.scheduler.client.report [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Deleted allocations for instance c84d15dc-0ef2-44e2-b579-104678a6bb07 [ 877.686015] env[61998]: DEBUG nova.compute.manager [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Received event network-vif-plugged-c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 877.686274] env[61998]: DEBUG oslo_concurrency.lockutils [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] Acquiring lock "b9c5feec-7bfd-470e-9833-b45403195e83-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.686483] env[61998]: DEBUG oslo_concurrency.lockutils [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] Lock "b9c5feec-7bfd-470e-9833-b45403195e83-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.686678] env[61998]: DEBUG oslo_concurrency.lockutils [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] Lock "b9c5feec-7bfd-470e-9833-b45403195e83-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.686821] env[61998]: DEBUG nova.compute.manager [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] No waiting events found dispatching network-vif-plugged-c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 877.686980] env[61998]: WARNING nova.compute.manager [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Received unexpected event network-vif-plugged-c0165176-8b9e-4fb8-ba3f-c8b58e45c287 for instance with vm_state building and task_state spawning. [ 877.687151] env[61998]: DEBUG nova.compute.manager [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Received event network-changed-c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 877.687301] env[61998]: DEBUG nova.compute.manager [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Refreshing instance network info cache due to event network-changed-c0165176-8b9e-4fb8-ba3f-c8b58e45c287. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 877.687506] env[61998]: DEBUG oslo_concurrency.lockutils [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] Acquiring lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.008610] env[61998]: INFO nova.compute.manager [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Took 30.23 seconds to build instance. [ 878.028691] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Releasing lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.028984] env[61998]: DEBUG nova.compute.manager [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Instance network_info: |[{"id": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "address": "fa:16:3e:05:38:e1", "network": {"id": "46300d6e-80f7-49cb-963b-7c569251b4b2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-784470546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2e3f7b7c926e4f778c23d04ad9598eb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0165176-8b", "ovs_interfaceid": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 878.029282] env[61998]: DEBUG oslo_concurrency.lockutils [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] Acquired lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.029459] env[61998]: DEBUG nova.network.neutron [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Refreshing network info cache for port c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 878.030702] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:38:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9c4edd5-d88e-4996-afea-00130ace0dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0165176-8b9e-4fb8-ba3f-c8b58e45c287', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 878.038691] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Creating folder: Project (2e3f7b7c926e4f778c23d04ad9598eb5). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 878.039583] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-043ffaba-6e59-4791-a2c1-ecd4b0e1a9f9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.051561] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Created folder: Project (2e3f7b7c926e4f778c23d04ad9598eb5) in parent group-v294665. [ 878.051745] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Creating folder: Instances. Parent ref: group-v294740. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 878.051969] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d76b9574-2f5b-4256-aed0-1f915a3254dc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.061091] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Created folder: Instances in parent group-v294740. 
[ 878.061321] env[61998]: DEBUG oslo.service.loopingcall [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 878.061502] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 878.061698] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1a789f0-3aa8-4f94-af13-2f676d2751c4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.083665] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 878.083665] env[61998]: value = "task-1388641" [ 878.083665] env[61998]: _type = "Task" [ 878.083665] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.091490] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388641, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.096576] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e491bdac-a4dd-49a7-91f3-9d24fea3dfde tempest-ImagesTestJSON-1194127224 tempest-ImagesTestJSON-1194127224-project-member] Lock "c84d15dc-0ef2-44e2-b579-104678a6bb07" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.392s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.512654] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7795c4f1-cb25-49c4-877d-9e522b56d8a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "4c15a9f8-4dc2-48e1-a697-03298adb8527" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.354s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.593154] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388641, 'name': CreateVM_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.773075] env[61998]: DEBUG nova.network.neutron [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updated VIF entry in instance network info cache for port c0165176-8b9e-4fb8-ba3f-c8b58e45c287. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 878.773075] env[61998]: DEBUG nova.network.neutron [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updating instance_info_cache with network_info: [{"id": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "address": "fa:16:3e:05:38:e1", "network": {"id": "46300d6e-80f7-49cb-963b-7c569251b4b2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-784470546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2e3f7b7c926e4f778c23d04ad9598eb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0165176-8b", "ovs_interfaceid": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.821607] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd14b83-76a5-4151-b0d4-9216c8901f1c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.829222] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10148730-21d5-48f4-8712-9d9fcecb1efd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.860812] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d152a108-7a5b-4800-a64c-054ed7ba209e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.868309] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55623031-0ba3-46c5-b276-d8363e14ba98 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.881410] env[61998]: DEBUG nova.compute.provider_tree [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.093888] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388641, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.121771] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Acquiring lock "1206c5c7-3eae-437b-9386-f3af937b8795" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.122060] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Lock "1206c5c7-3eae-437b-9386-f3af937b8795" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.122279] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Acquiring lock "1206c5c7-3eae-437b-9386-f3af937b8795-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.122464] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Lock "1206c5c7-3eae-437b-9386-f3af937b8795-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.122676] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Lock "1206c5c7-3eae-437b-9386-f3af937b8795-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.124841] env[61998]: INFO nova.compute.manager [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Terminating instance [ 879.126696] env[61998]: DEBUG nova.compute.manager [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 879.126890] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.127735] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577a72fd-e6b3-46b1-8701-fedd103930f3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.135198] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.135404] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-341faa6d-8c3d-454f-9264-1ee5c3830a39 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.143310] env[61998]: DEBUG oslo_vmware.api [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Waiting for the task: (returnval){ [ 879.143310] env[61998]: value = "task-1388642" [ 879.143310] env[61998]: _type = "Task" [ 879.143310] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.151760] env[61998]: DEBUG oslo_vmware.api [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388642, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.200586] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "4c15a9f8-4dc2-48e1-a697-03298adb8527" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.200899] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "4c15a9f8-4dc2-48e1-a697-03298adb8527" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.201101] env[61998]: INFO nova.compute.manager [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Shelving [ 879.275411] env[61998]: DEBUG oslo_concurrency.lockutils [req-4ba12437-8ccb-4bfd-a772-82797e13a78c req-947c61f5-9490-4a95-b6e4-c7f85a6f70ca service nova] Releasing lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.384850] env[61998]: DEBUG nova.scheduler.client.report [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 879.594450] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388641, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.652905] env[61998]: DEBUG oslo_vmware.api [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388642, 'name': PowerOffVM_Task, 'duration_secs': 0.277358} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.653194] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.653369] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.653645] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9fa8b199-a9b0-4bcb-8370-709fe21c655d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.721413] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.721645] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.721992] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Deleting the datastore file [datastore2] 1206c5c7-3eae-437b-9386-f3af937b8795 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.722336] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a71109c-77cb-46b3-8b54-ad6ba6c31858 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.729007] env[61998]: DEBUG oslo_vmware.api [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Waiting for the task: (returnval){ [ 879.729007] env[61998]: value = "task-1388644" [ 879.729007] env[61998]: _type = "Task" [ 879.729007] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.738135] env[61998]: DEBUG oslo_vmware.api [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388644, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.890452] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.322s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.891804] env[61998]: DEBUG nova.compute.manager [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 879.893853] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.668s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.893853] env[61998]: DEBUG nova.objects.instance [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lazy-loading 'resources' on Instance uuid f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 880.094929] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388641, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.213198] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 880.213495] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-255a3ced-c1ad-4faf-adc0-7f3d198e1344 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.220800] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 880.220800] env[61998]: value = "task-1388645" [ 880.220800] env[61998]: _type = "Task" [ 880.220800] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.229977] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388645, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.237463] env[61998]: DEBUG oslo_vmware.api [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Task: {'id': task-1388644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.261238} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.237693] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.237900] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.238059] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.238240] env[61998]: INFO nova.compute.manager [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Took 1.11 seconds to destroy the instance on the hypervisor. [ 880.238465] env[61998]: DEBUG oslo.service.loopingcall [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.238652] env[61998]: DEBUG nova.compute.manager [-] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 880.238745] env[61998]: DEBUG nova.network.neutron [-] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.397518] env[61998]: DEBUG nova.compute.utils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 880.403360] env[61998]: DEBUG nova.compute.manager [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 880.403360] env[61998]: DEBUG nova.network.neutron [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 880.484106] env[61998]: DEBUG nova.policy [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f155bbfca47547c2bf745811003ffcec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f49104f21d7147328bcc8edee8d3cdb2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 880.565329] env[61998]: DEBUG nova.compute.manager [req-0c7b138a-59e4-4963-8f8c-9689ca2f55f1 req-7ea92d19-9c7b-4d37-9771-050160f50fea service nova] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Received event network-vif-deleted-c8d6d30a-6ea6-4e25-b740-f859a1482020 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 880.565434] env[61998]: INFO nova.compute.manager [req-0c7b138a-59e4-4963-8f8c-9689ca2f55f1 req-7ea92d19-9c7b-4d37-9771-050160f50fea service nova] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Neutron deleted interface c8d6d30a-6ea6-4e25-b740-f859a1482020; detaching it from the instance and deleting it from the info cache [ 880.565605] env[61998]: DEBUG nova.network.neutron [req-0c7b138a-59e4-4963-8f8c-9689ca2f55f1 req-7ea92d19-9c7b-4d37-9771-050160f50fea service nova] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.598365] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388641, 'name': CreateVM_Task, 'duration_secs': 2.032035} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.598538] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 880.599212] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.599371] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.599681] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 880.599971] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b422d62c-db8b-48bf-8afb-023092ab9777 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.606801] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 880.606801] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]526b8a4f-3219-7002-7da1-9973409b341a" [ 880.606801] env[61998]: _type = "Task" [ 880.606801] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.615093] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526b8a4f-3219-7002-7da1-9973409b341a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.626208] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49d9797-c97e-4776-8e2d-aa2df78a1bd5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.632872] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4029894e-8b11-45a5-80fe-d5865a8f8da5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.663667] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b9c907-5b76-4dea-983d-856d2ce7ae04 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.672247] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca6676a-008b-4fcd-94eb-2b9c2db4897d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.685530] env[61998]: DEBUG nova.compute.provider_tree [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.730302] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388645, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.792583] env[61998]: DEBUG nova.network.neutron [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Successfully created port: 1cb05d6f-fb33-4e35-a7a7-862b3b11b653 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.902632] env[61998]: DEBUG nova.compute.manager [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 881.040122] env[61998]: DEBUG nova.network.neutron [-] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.069270] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0986839c-bad2-4cd1-bb3a-489a3f848d2e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.078772] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c17cea-9348-46de-b789-25eb0dcb7699 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.106229] env[61998]: DEBUG nova.compute.manager [req-0c7b138a-59e4-4963-8f8c-9689ca2f55f1 req-7ea92d19-9c7b-4d37-9771-050160f50fea service nova] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Detach interface failed, port_id=c8d6d30a-6ea6-4e25-b740-f859a1482020, reason: Instance 1206c5c7-3eae-437b-9386-f3af937b8795 could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 881.116313] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526b8a4f-3219-7002-7da1-9973409b341a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.188406] env[61998]: DEBUG nova.scheduler.client.report [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 881.230911] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388645, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.543527] env[61998]: INFO nova.compute.manager [-] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Took 1.30 seconds to deallocate network for instance. [ 881.618606] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526b8a4f-3219-7002-7da1-9973409b341a, 'name': SearchDatastore_Task, 'duration_secs': 0.860732} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.620553] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.620553] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 881.620553] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.620553] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.620748] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 881.620748] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7343c16-5793-46c4-aa3c-85fddb595bd4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.627927] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 881.628125] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 881.628843] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17245427-502a-4455-bf1c-0acd15dbcffb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.633818] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 881.633818] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]527c4fb3-6505-a147-a3fe-925802e4d277" [ 881.633818] env[61998]: _type = "Task" [ 881.633818] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.641680] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]527c4fb3-6505-a147-a3fe-925802e4d277, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.693015] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.799s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.695449] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.469s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.695751] env[61998]: DEBUG nova.objects.instance [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lazy-loading 'resources' on Instance uuid d780cbdc-8838-42bf-8736-bc2dd60e659c {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.719346] env[61998]: INFO nova.scheduler.client.report [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Deleted allocations for instance f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83 [ 881.736781] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388645, 'name': PowerOffVM_Task, 'duration_secs': 1.477693} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.736781] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 881.736781] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f683c2b-28cc-45dc-b31a-f403821b0eeb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.754374] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55e9561-c11b-4b45-bd83-8500150c75b8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.911409] env[61998]: DEBUG nova.compute.manager [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 881.944401] env[61998]: DEBUG nova.virt.hardware [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 881.944670] env[61998]: DEBUG nova.virt.hardware [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 881.944883] env[61998]: DEBUG nova.virt.hardware [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.945122] env[61998]: DEBUG nova.virt.hardware [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 881.945302] env[61998]: DEBUG nova.virt.hardware [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 
tempest-ServerDiskConfigTestJSON-347226950-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.945464] env[61998]: DEBUG nova.virt.hardware [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 881.945679] env[61998]: DEBUG nova.virt.hardware [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 881.945868] env[61998]: DEBUG nova.virt.hardware [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 881.946087] env[61998]: DEBUG nova.virt.hardware [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 881.946270] env[61998]: DEBUG nova.virt.hardware [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 881.946457] env[61998]: DEBUG nova.virt.hardware [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 881.947523] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a76e60-7796-40b7-a1b9-65fcad7371f6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.957551] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890e819c-d3df-4a48-a59c-1262e9f57ec4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.050780] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.091767] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" by 
"nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.092058] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.092246] env[61998]: INFO nova.compute.manager [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Shelving [ 882.133667] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "dadd9985-bca3-4207-927f-9490e0ae3f10" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.134439] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.148957] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]527c4fb3-6505-a147-a3fe-925802e4d277, 'name': SearchDatastore_Task, 'duration_secs': 0.008579} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.150280] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d691fad-323f-4666-aabe-7904cfe4ac75 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.157621] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 882.157621] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52528b75-4396-a92d-0a20-9b7ae7dea13d" [ 882.157621] env[61998]: _type = "Task" [ 882.157621] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.167090] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52528b75-4396-a92d-0a20-9b7ae7dea13d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.229647] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cd8a7b53-7c35-4258-a46c-461fd18d8d58 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 19.932s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.266229] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Creating Snapshot of the VM instance {{(pid=61998) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 882.266719] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e8471cd5-7739-41b7-b30b-108b84046b04 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.274354] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 882.274354] env[61998]: value = "task-1388646" [ 882.274354] env[61998]: _type = "Task" [ 882.274354] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.286323] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388646, 'name': CreateSnapshot_Task} progress is 0%.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.403273] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3427c6ab-c6c6-48d3-8844-948ac2756d04 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.409557] env[61998]: DEBUG nova.compute.manager [req-0941cb11-9117-4cff-9e1d-bc93eb133060 req-1f09ed60-a099-4519-a7f9-dd63e592bf09 service nova] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Received event network-vif-plugged-1cb05d6f-fb33-4e35-a7a7-862b3b11b653 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 882.409983] env[61998]: DEBUG oslo_concurrency.lockutils [req-0941cb11-9117-4cff-9e1d-bc93eb133060 req-1f09ed60-a099-4519-a7f9-dd63e592bf09 service nova] Acquiring lock "23265b26-7579-4514-a172-8cf2ec124ec6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.410235] env[61998]: DEBUG oslo_concurrency.lockutils [req-0941cb11-9117-4cff-9e1d-bc93eb133060 req-1f09ed60-a099-4519-a7f9-dd63e592bf09 service nova] Lock "23265b26-7579-4514-a172-8cf2ec124ec6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.410401] env[61998]: DEBUG oslo_concurrency.lockutils [req-0941cb11-9117-4cff-9e1d-bc93eb133060 req-1f09ed60-a099-4519-a7f9-dd63e592bf09 service nova] Lock "23265b26-7579-4514-a172-8cf2ec124ec6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.410565] env[61998]: DEBUG nova.compute.manager [req-0941cb11-9117-4cff-9e1d-bc93eb133060 req-1f09ed60-a099-4519-a7f9-dd63e592bf09 service nova] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] No waiting events found dispatching network-vif-plugged-1cb05d6f-fb33-4e35-a7a7-862b3b11b653 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 882.412997] env[61998]: WARNING nova.compute.manager [req-0941cb11-9117-4cff-9e1d-bc93eb133060 req-1f09ed60-a099-4519-a7f9-dd63e592bf09 service nova] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Received unexpected event network-vif-plugged-1cb05d6f-fb33-4e35-a7a7-862b3b11b653 for instance with vm_state building and task_state spawning.
[ 882.420459] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6262a02-628d-4468-96c5-b18d68f12fe2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.457884] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140863f5-87ef-4819-b56b-47e0589abe30 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.466515] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb835bb1-cab2-442c-b5bd-5498497be1fd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.481845] env[61998]: DEBUG nova.compute.provider_tree [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.517506] env[61998]: DEBUG nova.network.neutron [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Successfully updated port: 1cb05d6f-fb33-4e35-a7a7-862b3b11b653 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 882.641558] env[61998]: DEBUG nova.compute.utils [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 882.670048] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52528b75-4396-a92d-0a20-9b7ae7dea13d, 'name': SearchDatastore_Task, 'duration_secs': 0.012027} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.670334] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.670599] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] b9c5feec-7bfd-470e-9833-b45403195e83/b9c5feec-7bfd-470e-9833-b45403195e83.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 882.670880] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f06aaf91-9d4d-466a-9f42-490bb2105130 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.680008] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 882.680008] env[61998]: value = "task-1388647" [ 882.680008] env[61998]: _type = "Task" [ 882.680008] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.688716] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388647, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.789122] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388646, 'name': CreateSnapshot_Task, 'duration_secs': 0.411493} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.789122] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Created Snapshot of the VM instance {{(pid=61998) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 882.789591] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b667d1b7-f47a-42c2-bfda-e74e2762870d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.986388] env[61998]: DEBUG nova.scheduler.client.report [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 883.020479] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "refresh_cache-23265b26-7579-4514-a172-8cf2ec124ec6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.020630] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "refresh_cache-23265b26-7579-4514-a172-8cf2ec124ec6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.020783] env[61998]: DEBUG nova.network.neutron [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.104459] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.104459] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-483e7873-054e-4768-a483-a458ad98ca03 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.114480] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 883.114480] env[61998]: value = "task-1388648" [ 
883.114480] env[61998]: _type = "Task" [ 883.114480] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.124178] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388648, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.145432] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.011s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.190435] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388647, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.310514] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Creating linked-clone VM from snapshot {{(pid=61998) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 883.310846] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-408194df-8625-48a0-98ac-97d1649fa2b4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.319874] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 883.319874] env[61998]: value = "task-1388649" [ 883.319874] env[61998]: _type = "Task" [ 883.319874] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.328732] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388649, 'name': CloneVM_Task} progress is 0%.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.491380] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.796s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.494829] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.139s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.508300] env[61998]: INFO nova.scheduler.client.report [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Deleted allocations for instance d780cbdc-8838-42bf-8736-bc2dd60e659c [ 883.556529] env[61998]: DEBUG nova.network.neutron [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.623694] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388648, 'name': PowerOffVM_Task, 'duration_secs': 0.207306} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.623944] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 883.624760] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a5d4b6-6fd2-4a4d-a0f3-eab52a4a1d1b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.649444] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45227530-3c45-43f8-ae16-7c542ca09be2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.692124] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388647, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529181} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.697017] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] b9c5feec-7bfd-470e-9833-b45403195e83/b9c5feec-7bfd-470e-9833-b45403195e83.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 883.697017] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.697017] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e18dc7a-d267-4f1b-bcfc-cfa689a04506 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.703796] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 883.703796] env[61998]: value = "task-1388650" [ 883.703796] env[61998]: _type = "Task" [ 883.703796] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.713257] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388650, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.742709] env[61998]: DEBUG nova.network.neutron [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Updating instance_info_cache with network_info: [{"id": "1cb05d6f-fb33-4e35-a7a7-862b3b11b653", "address": "fa:16:3e:9b:ef:32", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb05d6f-fb", "ovs_interfaceid": "1cb05d6f-fb33-4e35-a7a7-862b3b11b653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.831413] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388649, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.901264] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquiring lock "2914460e-39e5-495b-96d8-b3580d0318d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.901493] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lock "2914460e-39e5-495b-96d8-b3580d0318d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.017035] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a7d5122e-71ac-4c68-9e16-7481ccb4ddd0 tempest-MultipleCreateTestJSON-1503300055 tempest-MultipleCreateTestJSON-1503300055-project-member] Lock "d780cbdc-8838-42bf-8736-bc2dd60e659c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 21.725s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.162108] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Creating Snapshot of the VM instance {{(pid=61998) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 884.162878] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d61b4c6e-07ad-40c9-88a0-5134d07c0575 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.171139] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 884.171139] env[61998]: value = "task-1388651" [ 884.171139] env[61998]: _type = "Task" [ 884.171139] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.179393] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388651, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.213761] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388650, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066592} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.214079] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 884.215037] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7472c86b-7826-423d-bcd4-ae27982b6467 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.229627] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "dadd9985-bca3-4207-927f-9490e0ae3f10" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.229627] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.229851] env[61998]: INFO nova.compute.manager [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Attaching volume f6cad4b4-0a76-4bbc-8faf-8797e51710f7 to /dev/sdb [ 884.240406] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] b9c5feec-7bfd-470e-9833-b45403195e83/b9c5feec-7bfd-470e-9833-b45403195e83.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.240946] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c253436c-8e47-446d-85fd-8cfd8e41d329 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.259053] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "refresh_cache-23265b26-7579-4514-a172-8cf2ec124ec6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.259382] env[61998]: DEBUG nova.compute.manager [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Instance network_info: |[{"id": "1cb05d6f-fb33-4e35-a7a7-862b3b11b653", "address": "fa:16:3e:9b:ef:32", "network": {"id":
"ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb05d6f-fb", "ovs_interfaceid": "1cb05d6f-fb33-4e35-a7a7-862b3b11b653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 884.259912] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:ef:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1cb05d6f-fb33-4e35-a7a7-862b3b11b653', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.267279] env[61998]: DEBUG oslo.service.loopingcall [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.268014] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 884.268428] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2bcdbf93-9581-4727-9e20-eff0de279497 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.284023] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 884.284023] env[61998]: value = "task-1388652" [ 884.284023] env[61998]: _type = "Task" [ 884.284023] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.289316] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.289316] env[61998]: value = "task-1388653" [ 884.289316] env[61998]: _type = "Task" [ 884.289316] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.293775] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b86ea6d-1e67-4b35-8f91-d592867c7a97 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.296543] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388652, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.307243] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388653, 'name': CreateVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.307243] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c878d6-bf04-4d3b-9d6d-41a4e9d55bb1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.322839] env[61998]: DEBUG nova.virt.block_device [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Updating existing volume attachment record: 1a882b6e-a5d4-426b-8199-838ca73a635d {{(pid=61998) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 884.336222] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388649, 'name': CloneVM_Task} progress is 94%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.404224] env[61998]: DEBUG nova.compute.manager [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 884.451061] env[61998]: DEBUG nova.compute.manager [req-5f4df095-e7e8-4460-96c3-e3ea42b153c0 req-a5b39267-cb04-4b6e-a783-1f8abf4a40ea service nova] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Received event network-changed-1cb05d6f-fb33-4e35-a7a7-862b3b11b653 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 884.451061] env[61998]: DEBUG nova.compute.manager [req-5f4df095-e7e8-4460-96c3-e3ea42b153c0 req-a5b39267-cb04-4b6e-a783-1f8abf4a40ea service nova] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Refreshing instance network info cache due to event network-changed-1cb05d6f-fb33-4e35-a7a7-862b3b11b653. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 884.451061] env[61998]: DEBUG oslo_concurrency.lockutils [req-5f4df095-e7e8-4460-96c3-e3ea42b153c0 req-a5b39267-cb04-4b6e-a783-1f8abf4a40ea service nova] Acquiring lock "refresh_cache-23265b26-7579-4514-a172-8cf2ec124ec6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.451061] env[61998]: DEBUG oslo_concurrency.lockutils [req-5f4df095-e7e8-4460-96c3-e3ea42b153c0 req-a5b39267-cb04-4b6e-a783-1f8abf4a40ea service nova] Acquired lock "refresh_cache-23265b26-7579-4514-a172-8cf2ec124ec6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.451061] env[61998]: DEBUG nova.network.neutron [req-5f4df095-e7e8-4460-96c3-e3ea42b153c0 req-a5b39267-cb04-4b6e-a783-1f8abf4a40ea service nova] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Refreshing network info cache for port 1cb05d6f-fb33-4e35-a7a7-862b3b11b653 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 884.531854] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 2d0b199f-e0f1-42e0-afb5-e08602aebf01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.532161] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance dadd9985-bca3-4207-927f-9490e0ae3f10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.532206] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance c51f684b-84f0-42b3-acf9-9e8317b10cb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.532526] env[61998]: WARNING nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance ac4a8463-91ba-4061-aa5d-1c72c4f532ce is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 884.532526] env[61998]: WARNING nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance bcb05670-dc58-46be-a4a9-58a260e4132f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 884.532863] env[61998]: WARNING nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance f3089d53-9c8f-4276-8e2e-0518cf29004b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
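
The interleaved resource_tracker entries above and below encode a fixed three-way decision per placement allocation: an instance actively managed on this host keeps its allocation (DEBUG), an instance the scheduler has claimed for but not yet started is skipped (DEBUG, a few entries further down), and an unknown instance with an allocation referencing this host draws a WARNING and is left alone. A minimal stdlib sketch of that decision, using hypothetical names (Allocation, heal_allocations, managed_uuids, scheduled_uuids); the real logic lives in nova.compute.resource_tracker.ResourceTracker._remove_deleted_instances_allocations, per the trailers:

    # Hypothetical, simplified sketch of the allocation-healing decision the
    # resource tracker logs here; not the actual Nova implementation.
    import logging
    from dataclasses import dataclass

    LOG = logging.getLogger(__name__)

    @dataclass
    class Allocation:
        instance_uuid: str
        resources: dict  # e.g. {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}

    def heal_allocations(allocations, managed_uuids, scheduled_uuids):
        """Decide, per allocation against this compute node, what to do."""
        for alloc in allocations:
            if alloc.instance_uuid in managed_uuids:
                # Instance is actively managed here; its allocation is correct.
                LOG.debug("Instance %s actively managed on this compute host "
                          "and has allocations in placement: %s.",
                          alloc.instance_uuid, alloc.resources)
            elif alloc.instance_uuid in scheduled_uuids:
                # Scheduler made the allocation; instance has yet to start.
                LOG.debug("Instance %s has been scheduled to this compute "
                          "host; skipping heal of allocation: %s.",
                          alloc.instance_uuid, alloc.resources)
            else:
                # Unknown instance: never touch the allocation automatically.
                LOG.warning("Instance %s is not being actively managed by "
                            "this compute host but has allocations "
                            "referencing this compute host: %s. Skipping "
                            "heal of allocation because we do not know what "
                            "to do.", alloc.instance_uuid, alloc.resources)

The deliberately conservative else-branch is why the WARNINGs above repeat for ac4a8463-…, bcb05670-…, and f3089d53-…: those instances were just terminated by other requests, and the tracker refuses to guess until the scheduler report client deletes the allocations (as it does further down).
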
[ 884.532863] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 4ca7de74-3bcb-4da0-a2e1-573584467cc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.532863] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.533022] env[61998]: WARNING nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 1206c5c7-3eae-437b-9386-f3af937b8795 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 884.533066] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 4c15a9f8-4dc2-48e1-a697-03298adb8527 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.533185] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance b9c5feec-7bfd-470e-9833-b45403195e83 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.533472] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 23265b26-7579-4514-a172-8cf2ec124ec6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 884.682093] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388651, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.805813] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388653, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.810411] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388652, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.833757] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388649, 'name': CloneVM_Task} progress is 94%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.938517] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.040660] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 2914460e-39e5-495b-96d8-b3580d0318d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 885.040660] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 885.040660] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 885.183461] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388651, 'name': CreateSnapshot_Task, 'duration_secs': 0.8457} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.185957] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Created Snapshot of the VM instance {{(pid=61998) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 885.187116] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a10d37f-7c88-492f-a10b-83a8a126633b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.258340] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b66b02-4262-4d0f-b57f-a2f99ec312e9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.266959] env[61998]: DEBUG nova.network.neutron [req-5f4df095-e7e8-4460-96c3-e3ea42b153c0 req-a5b39267-cb04-4b6e-a783-1f8abf4a40ea service nova] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Updated VIF entry in instance network info cache for port 1cb05d6f-fb33-4e35-a7a7-862b3b11b653. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 885.267237] env[61998]: DEBUG nova.network.neutron [req-5f4df095-e7e8-4460-96c3-e3ea42b153c0 req-a5b39267-cb04-4b6e-a783-1f8abf4a40ea service nova] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Updating instance_info_cache with network_info: [{"id": "1cb05d6f-fb33-4e35-a7a7-862b3b11b653", "address": "fa:16:3e:9b:ef:32", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb05d6f-fb", "ovs_interfaceid": "1cb05d6f-fb33-4e35-a7a7-862b3b11b653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.271328] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03153568-a9d2-4b2e-8e68-76523bced118 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.311096] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ff3545-d460-411a-b6f8-2c94c8785511 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.320813] env[61998]: DEBUG oslo_vmware.api [None 
req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388652, 'name': ReconfigVM_Task, 'duration_secs': 0.813255} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.324110] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7857da56-2d9a-4fdf-bfd9-fcc128ee3024 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.327906] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Reconfigured VM instance instance-0000004f to attach disk [datastore1] b9c5feec-7bfd-470e-9833-b45403195e83/b9c5feec-7bfd-470e-9833-b45403195e83.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.328503] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388653, 'name': CreateVM_Task, 'duration_secs': 0.615266} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.328920] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a34b3d4-6f11-416e-8638-fd5575b79fc8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.332523] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 885.333450] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.333816] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.333912] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 885.334578] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-654927d8-1ede-4b20-8306-90cc3563d823 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.346261] env[61998]: DEBUG nova.compute.provider_tree [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed in 
ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.347493] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388649, 'name': CloneVM_Task} progress is 94%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.349039] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 885.349039] env[61998]: value = "task-1388657" [ 885.349039] env[61998]: _type = "Task" [ 885.349039] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.352614] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 885.352614] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52a45ffe-90d3-80f8-adef-ba8ec9865e8d" [ 885.352614] env[61998]: _type = "Task" [ 885.352614] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.358662] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388657, 'name': Rename_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.364011] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52a45ffe-90d3-80f8-adef-ba8ec9865e8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.709016] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Creating linked-clone VM from snapshot {{(pid=61998) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 885.709357] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-848627ec-97b1-4caf-a63e-b81f2dbb0634 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.717949] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 885.717949] env[61998]: value = "task-1388658" [ 885.717949] env[61998]: _type = "Task" [ 885.717949] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.727587] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388658, 'name': CloneVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.771647] env[61998]: DEBUG oslo_concurrency.lockutils [req-5f4df095-e7e8-4460-96c3-e3ea42b153c0 req-a5b39267-cb04-4b6e-a783-1f8abf4a40ea service nova] Releasing lock "refresh_cache-23265b26-7579-4514-a172-8cf2ec124ec6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.838334] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388649, 'name': CloneVM_Task} progress is 94%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.850012] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 885.867385] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388657, 'name': Rename_Task, 'duration_secs': 0.131689} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.867639] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52a45ffe-90d3-80f8-adef-ba8ec9865e8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.868549] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.868792] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d006eb4d-1337-4e2f-b764-cee85e80a9c4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.874462] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 885.874462] env[61998]: value = "task-1388659" [ 885.874462] env[61998]: _type = "Task" [ 885.874462] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.882668] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388659, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.232575] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388658, 'name': CloneVM_Task} progress is 94%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.339960] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388649, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.358496] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61998) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 886.358821] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.865s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.359218] env[61998]: DEBUG oslo_concurrency.lockutils [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.336s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.360015] env[61998]: DEBUG oslo_concurrency.lockutils [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.362298] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.660s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.362998] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.365065] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.211s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.365270] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.367348] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 
tempest-ServerTagsTestJSON-323916211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.317s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.367564] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.369327] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.431s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.370895] env[61998]: INFO nova.compute.claims [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.389316] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52a45ffe-90d3-80f8-adef-ba8ec9865e8d, 'name': SearchDatastore_Task, 'duration_secs': 0.756008} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.393306] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.393493] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 886.393715] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.393941] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.394157] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 886.394523] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388659, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.394786] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6b37904-8efd-47d9-b0fc-3d973fa2e27c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.401378] env[61998]: INFO nova.scheduler.client.report [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Deleted allocations for instance 1206c5c7-3eae-437b-9386-f3af937b8795 [ 886.406118] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 886.406170] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 886.407119] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf34e98f-59a1-46c2-be6d-a09e77a053cc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.413866] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 886.413866] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5203a8e9-af9b-88b4-2482-d7ff315f2294" [ 886.413866] env[61998]: _type = "Task" [ 886.413866] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.418435] env[61998]: INFO nova.scheduler.client.report [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Deleted allocations for instance bcb05670-dc58-46be-a4a9-58a260e4132f [ 886.423158] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5203a8e9-af9b-88b4-2482-d7ff315f2294, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.433235] env[61998]: INFO nova.scheduler.client.report [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted allocations for instance ac4a8463-91ba-4061-aa5d-1c72c4f532ce [ 886.439287] env[61998]: INFO nova.scheduler.client.report [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Deleted allocations for instance f3089d53-9c8f-4276-8e2e-0518cf29004b [ 886.730130] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388658, 'name': CloneVM_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.841117] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388649, 'name': CloneVM_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.890834] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquiring lock "aaee1558-f98b-4006-93b6-69434c78e79c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.891300] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lock "aaee1558-f98b-4006-93b6-69434c78e79c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.898029] env[61998]: DEBUG oslo_vmware.api [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388659, 'name': PowerOnVM_Task, 'duration_secs': 0.717669} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.898796] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.899445] env[61998]: INFO nova.compute.manager [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Took 11.04 seconds to spawn the instance on the hypervisor. 
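
Every *_Task in this section (CreateSnapshot_Task, ReconfigVM_Task, CreateVM_Task, CloneVM_Task, Rename_Task, PowerOnVM_Task) is driven by the same wait_for_task/_poll_task pair in oslo_vmware/api.py, per the :397/:434/:444 trailers: submit the task, poll its state, log "progress is N%", and log the duration on success. A self-contained sketch of that loop under stated assumptions: get_task_info() is a hypothetical stand-in for the PropertyCollector read oslo.vmware actually performs, and the real loop runs on a looping call rather than time.sleep:

    # Minimal sketch of a vCenter task poll loop; assumes get_task_info()
    # returns a dict like {'state': ..., 'progress': ..., 'error': ...}.
    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a task until it reaches a terminal state."""
        start = time.monotonic()
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                duration = time.monotonic() - start
                print(f"Task completed successfully in {duration:.6f}s")
                return info
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            print(f"Task progress is {info.get('progress', 0)}%.")
            time.sleep(poll_interval)

Under those assumptions a caller passes a closure that re-reads the task's info object each iteration; the 'duration_secs' values logged above (e.g. 0.717669 for PowerOnVM_Task) correspond to the elapsed time measured this way.
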
[ 886.899445] env[61998]: DEBUG nova.compute.manager [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 886.900194] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd22c33c-20b6-429c-b54b-48e7a15f32a3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.914087] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a929304-5c16-4069-92e5-216097d0695b tempest-ServerTagsTestJSON-323916211 tempest-ServerTagsTestJSON-323916211-project-member] Lock "1206c5c7-3eae-437b-9386-f3af937b8795" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.792s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.924373] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5203a8e9-af9b-88b4-2482-d7ff315f2294, 'name': SearchDatastore_Task, 'duration_secs': 0.008997} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.925368] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e383b692-b660-4008-82cb-279917f26114 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.930553] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1a6c95e4-33b5-465e-a90d-51318f8b1427 tempest-ServerAddressesNegativeTestJSON-1103742873 tempest-ServerAddressesNegativeTestJSON-1103742873-project-member] Lock "bcb05670-dc58-46be-a4a9-58a260e4132f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.210s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.934891] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 886.934891] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52f94fce-43cc-27d9-ce5c-abb1923eaa6c" [ 886.934891] env[61998]: _type = "Task" [ 886.934891] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.945661] env[61998]: DEBUG oslo_concurrency.lockutils [None req-09281c41-9838-4ca4-8931-3f6971757cc4 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "ac4a8463-91ba-4061-aa5d-1c72c4f532ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.614s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.950790] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b121340c-cce1-46d5-9479-51706af92b54 tempest-ServersNegativeTestMultiTenantJSON-994910868 tempest-ServersNegativeTestMultiTenantJSON-994910868-project-member] Lock "f3089d53-9c8f-4276-8e2e-0518cf29004b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.740s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.957424] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52f94fce-43cc-27d9-ce5c-abb1923eaa6c, 'name': SearchDatastore_Task, 'duration_secs': 0.009477} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.959459] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.959459] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 23265b26-7579-4514-a172-8cf2ec124ec6/23265b26-7579-4514-a172-8cf2ec124ec6.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 886.959631] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8efcce0f-183a-4fd9-bc80-4e4e0d50564c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.969489] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 886.969489] env[61998]: value = "task-1388660" [ 886.969489] env[61998]: _type = "Task" [ 886.969489] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.978910] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388660, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.232591] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388658, 'name': CloneVM_Task, 'duration_secs': 1.106912} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.233085] env[61998]: INFO nova.virt.vmwareapi.vmops [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Created linked-clone VM from snapshot [ 887.233682] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6c8f92-d712-4262-8070-882676f4269f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.244380] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Uploading image 45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af {{(pid=61998) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 887.275490] env[61998]: DEBUG oslo_vmware.rw_handles [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 887.275490] env[61998]: value = "vm-294749" [ 887.275490] env[61998]: _type = "VirtualMachine" [ 887.275490] env[61998]: }. {{(pid=61998) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 887.275800] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-79803051-5fe5-4fcf-b770-04c2c0d487d5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.286164] env[61998]: DEBUG oslo_vmware.rw_handles [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lease: (returnval){ [ 887.286164] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52cd40b9-b193-5774-6c41-2cd6edae3d44" [ 887.286164] env[61998]: _type = "HttpNfcLease" [ 887.286164] env[61998]: } obtained for exporting VM: (result){ [ 887.286164] env[61998]: value = "vm-294749" [ 887.286164] env[61998]: _type = "VirtualMachine" [ 887.286164] env[61998]: }. {{(pid=61998) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 887.286497] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the lease: (returnval){ [ 887.286497] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52cd40b9-b193-5774-6c41-2cd6edae3d44" [ 887.286497] env[61998]: _type = "HttpNfcLease" [ 887.286497] env[61998]: } to be ready. 
{{(pid=61998) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 887.294020] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 887.294020] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52cd40b9-b193-5774-6c41-2cd6edae3d44" [ 887.294020] env[61998]: _type = "HttpNfcLease" [ 887.294020] env[61998]: } is initializing. {{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 887.345423] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388649, 'name': CloneVM_Task, 'duration_secs': 3.552471} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.345883] env[61998]: INFO nova.virt.vmwareapi.vmops [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Created linked-clone VM from snapshot [ 887.347095] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb6c85e-c9b8-44cc-a064-a2b6a44287e3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.361889] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Uploading image 01fdcc19-75c0-4d14-9ab4-17361ee18eeb {{(pid=61998) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 887.396046] env[61998]: DEBUG oslo_vmware.rw_handles [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 887.396046] env[61998]: value = "vm-294744" [ 887.396046] env[61998]: _type = "VirtualMachine" [ 887.396046] env[61998]: }. {{(pid=61998) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 887.396780] env[61998]: DEBUG nova.compute.manager [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 887.399752] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5ccaf8b6-2057-43fc-ba79-ee176ea2e8af {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.413932] env[61998]: DEBUG oslo_vmware.rw_handles [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lease: (returnval){ [ 887.413932] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5235440d-1bd1-3cd8-88fd-6977083a0213" [ 887.413932] env[61998]: _type = "HttpNfcLease" [ 887.413932] env[61998]: } obtained for exporting VM: (result){ [ 887.413932] env[61998]: value = "vm-294744" [ 887.413932] env[61998]: _type = "VirtualMachine" [ 887.413932] env[61998]: }. 
{{(pid=61998) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 887.413932] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the lease: (returnval){ [ 887.413932] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5235440d-1bd1-3cd8-88fd-6977083a0213" [ 887.413932] env[61998]: _type = "HttpNfcLease" [ 887.413932] env[61998]: } to be ready. {{(pid=61998) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 887.436688] env[61998]: INFO nova.compute.manager [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Took 31.33 seconds to build instance. [ 887.447311] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 887.447311] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5235440d-1bd1-3cd8-88fd-6977083a0213" [ 887.447311] env[61998]: _type = "HttpNfcLease" [ 887.447311] env[61998]: } is initializing. {{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 887.486655] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388660, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500624} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.489687] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 23265b26-7579-4514-a172-8cf2ec124ec6/23265b26-7579-4514-a172-8cf2ec124ec6.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 887.489687] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 887.489687] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-739eb450-97b3-4e24-955b-8318e87cee6a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.496924] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 887.496924] env[61998]: value = "task-1388663" [ 887.496924] env[61998]: _type = "Task" [ 887.496924] env[61998]: } to complete. 
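[annotation] The "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" triplet seen throughout this log is one polling loop. A sketch of that pattern, assuming a hypothetical read_task_info() returning (state, progress_pct, error); oslo_vmware.api.VMwareAPISession.wait_for_task is the real implementation:

```python
import time

def wait_for_task(task, read_task_info, interval=0.5):
    """Poll a vCenter task, echoing the progress lines seen in this log."""
    while True:
        state, progress, error = read_task_info(task)
        if state == 'running':
            print("Task %s progress is %d%%" % (task, progress or 0))
        elif state == 'success':
            print("Task %s completed successfully" % task)
            return
        elif state == 'error':
            raise RuntimeError(error or 'task failed')
        time.sleep(interval)
```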
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.513096] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388663, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.628570] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7442f995-e7b9-4175-a3da-9aab634aa2aa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.637281] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a247924c-9f2a-48f0-b590-ed523e3014e4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.670684] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3df25b9-67c2-4881-b469-22fbce29fe51 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.679650] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c422732a-533b-4647-9c41-66aff7d5d683 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.696227] env[61998]: DEBUG nova.compute.provider_tree [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.795355] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 887.795355] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52cd40b9-b193-5774-6c41-2cd6edae3d44" [ 887.795355] env[61998]: _type = "HttpNfcLease" [ 887.795355] env[61998]: } is ready. {{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 887.795650] env[61998]: DEBUG oslo_vmware.rw_handles [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 887.795650] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52cd40b9-b193-5774-6c41-2cd6edae3d44" [ 887.795650] env[61998]: _type = "HttpNfcLease" [ 887.795650] env[61998]: }. {{(pid=61998) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 887.796414] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8f66c0-657b-45ae-889d-9de08a28d187 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.805935] env[61998]: DEBUG oslo_vmware.rw_handles [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5284c69b-8312-72b1-d020-17bf3b72924c/disk-0.vmdk from lease info. 
{{(pid=61998) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 887.806335] env[61998]: DEBUG oslo_vmware.rw_handles [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5284c69b-8312-72b1-d020-17bf3b72924c/disk-0.vmdk for reading. {{(pid=61998) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 887.928372] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.930336] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 887.930336] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5235440d-1bd1-3cd8-88fd-6977083a0213" [ 887.930336] env[61998]: _type = "HttpNfcLease" [ 887.930336] env[61998]: } is ready. {{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 887.930604] env[61998]: DEBUG oslo_vmware.rw_handles [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 887.930604] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5235440d-1bd1-3cd8-88fd-6977083a0213" [ 887.930604] env[61998]: _type = "HttpNfcLease" [ 887.930604] env[61998]: }. {{(pid=61998) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 887.931404] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa444d9-872e-4f73-8a79-6e0851d1659d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.940040] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bc60247-1a91-40f1-a9aa-71819a37cd54 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Lock "b9c5feec-7bfd-470e-9833-b45403195e83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.616s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.944760] env[61998]: DEBUG oslo_vmware.rw_handles [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a1d65f-ade6-d4a7-1863-efbce7182068/disk-0.vmdk from lease info. {{(pid=61998) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 887.944760] env[61998]: DEBUG oslo_vmware.rw_handles [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a1d65f-ade6-d4a7-1863-efbce7182068/disk-0.vmdk for reading. 
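[annotation] Once the lease reports a VMDK URL, the image upload streams the disk straight off the ESX host over HTTPS (the HttpNfcLeaseProgress calls that appear shortly after are keep-alives for this transfer). A simplified sketch of the read side, assuming the requests library; TLS verification and session cookies are elided here, and the report callback standing in for lease-progress updates is an assumption:

```python
import requests

def stream_vmdk(url, out_path, chunk_size=64 * 1024, report=None):
    """Stream an exported disk from an NFC URL to a local file."""
    read = 0
    with requests.get(url, stream=True, verify=False) as resp, \
            open(out_path, 'wb') as out:
        resp.raise_for_status()
        for chunk in resp.iter_content(chunk_size=chunk_size):
            out.write(chunk)
            read += len(chunk)
            if report:
                report(read)  # e.g. drive HttpNfcLeaseProgress keep-alives
```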
{{(pid=61998) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 888.016403] env[61998]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b5524c36-74d8-4a18-a19a-8df85c4a6868 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.018895] env[61998]: INFO nova.compute.manager [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Rescuing [ 888.019149] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquiring lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.019386] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquired lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.019460] env[61998]: DEBUG nova.network.neutron [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 888.024173] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388663, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073857} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.025680] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 888.026475] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a14bd9c-7ffe-46b9-8008-ac3611b55770 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.050450] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 23265b26-7579-4514-a172-8cf2ec124ec6/23265b26-7579-4514-a172-8cf2ec124ec6.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 888.054985] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1130ee55-f0c0-40d9-bd0c-7da1d5b8d290 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.071140] env[61998]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-756f995c-95ce-47c2-9256-c42e449ea0f2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.078748] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 888.078748] env[61998]: value = "task-1388664" [ 888.078748] env[61998]: _type = "Task" [ 888.078748] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.096768] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388664, 'name': ReconfigVM_Task} progress is 6%. 
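[annotation] The ReconfigVM_Task above attaches the copied root VMDK to the instance. Nova goes through oslo.vmware/suds, but the same device-change spec is easier to read in pyVmomi form; this is a sketch, and vm, controller_key, unit_number and the datastore path are placeholders:

```python
from pyVmomi import vim

def attach_vmdk(vm, datastore_path, controller_key, unit_number):
    """Build and submit a ReconfigVM_Task that adds an existing VMDK."""
    backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
    backing.fileName = datastore_path  # e.g. '[datastore1] uuid/uuid.vmdk'
    backing.diskMode = 'persistent'

    disk = vim.vm.device.VirtualDisk()
    disk.controllerKey = controller_key
    disk.unitNumber = unit_number
    disk.backing = backing

    dev_spec = vim.vm.device.VirtualDeviceSpec()
    dev_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
    dev_spec.device = disk

    spec = vim.vm.ConfigSpec(deviceChange=[dev_spec])
    return vm.ReconfigVM_Task(spec=spec)  # then poll like any other task
```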
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.202732] env[61998]: DEBUG nova.scheduler.client.report [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 888.290395] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "6e71b3c4-bac7-455c-94fd-2a9bc5128132" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.291602] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "6e71b3c4-bac7-455c-94fd-2a9bc5128132" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.594709] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388664, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.710502] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.341s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.725112] env[61998]: DEBUG nova.compute.manager [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Start building networks asynchronously for instance. 
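[annotation] The "Inventory has not changed for provider ..." lines come from diffing the cached provider inventory against freshly computed data so Placement is only called when something moved. A minimal sketch of that comparison using the exact inventory shape logged above:

```python
def inventory_changed(cached, new):
    """Field-by-field comparison of per-resource-class inventories."""
    if set(cached) != set(new):
        return True
    return any(cached[rc] != new[rc] for rc in new)

cached = {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1,
                   'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}}
assert not inventory_changed(cached, dict(cached))  # skip the PUT
```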
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 888.735397] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.805s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.737702] env[61998]: INFO nova.compute.claims [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 888.795277] env[61998]: DEBUG nova.compute.manager [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 889.101175] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388664, 'name': ReconfigVM_Task, 'duration_secs': 0.660355} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.101299] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 23265b26-7579-4514-a172-8cf2ec124ec6/23265b26-7579-4514-a172-8cf2ec124ec6.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 889.102134] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd2467de-04fd-4b83-be8b-4574483e60d4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.112670] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 889.112670] env[61998]: value = "task-1388666" [ 889.112670] env[61998]: _type = "Task" [ 889.112670] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.126405] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388666, 'name': Rename_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.235099] env[61998]: DEBUG nova.compute.utils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 889.240019] env[61998]: DEBUG nova.compute.manager [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 889.240277] env[61998]: DEBUG nova.network.neutron [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 889.341911] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.361211] env[61998]: DEBUG nova.policy [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c27a62fba1a3413eb2631fd8663c4efb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8202a47486224771ae061f4787686ccb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 889.376077] env[61998]: DEBUG nova.network.neutron [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updating instance_info_cache with network_info: [{"id": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "address": "fa:16:3e:05:38:e1", "network": {"id": "46300d6e-80f7-49cb-963b-7c569251b4b2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-784470546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2e3f7b7c926e4f778c23d04ad9598eb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0165176-8b", 
"ovs_interfaceid": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.626165] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388666, 'name': Rename_Task, 'duration_secs': 0.215426} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.627916] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 889.628168] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5071dd60-fe32-4e59-a2e1-e50741091e58 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.640225] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 889.640225] env[61998]: value = "task-1388667" [ 889.640225] env[61998]: _type = "Task" [ 889.640225] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.655030] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388667, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.747457] env[61998]: DEBUG nova.compute.manager [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 889.887874] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Releasing lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.904372] env[61998]: DEBUG nova.network.neutron [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Successfully created port: 47933687-23f9-4630-a0bb-9af926699f03 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.000079] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b7455a-3490-485d-99e1-7850836350fa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.012219] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25aac099-462c-4be0-9791-48f3e5b01dd9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.880529] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8ddb03-f67f-43b1-aaf9-d3e082dbeace {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.891923] env[61998]: DEBUG oslo_vmware.api [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388667, 'name': PowerOnVM_Task, 'duration_secs': 0.548141} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.893256] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Volume attach. 
Driver type: vmdk {{(pid=61998) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 890.893580] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294747', 'volume_id': 'f6cad4b4-0a76-4bbc-8faf-8797e51710f7', 'name': 'volume-f6cad4b4-0a76-4bbc-8faf-8797e51710f7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dadd9985-bca3-4207-927f-9490e0ae3f10', 'attached_at': '', 'detached_at': '', 'volume_id': 'f6cad4b4-0a76-4bbc-8faf-8797e51710f7', 'serial': 'f6cad4b4-0a76-4bbc-8faf-8797e51710f7'} {{(pid=61998) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 890.893822] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 890.894099] env[61998]: INFO nova.compute.manager [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Took 8.98 seconds to spawn the instance on the hypervisor. [ 890.894309] env[61998]: DEBUG nova.compute.manager [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 890.895658] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf179579-826a-416e-a4ec-917142476734 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.900173] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e4a85f-b475-48a0-8a77-0e01300da182 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.903489] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a0d708-a079-4522-b34c-cd9f432e4c9e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.916434] env[61998]: DEBUG nova.compute.provider_tree [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.934827] env[61998]: DEBUG nova.scheduler.client.report [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 890.938356] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2092efb-f421-429d-be35-322c01c1e0a5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.967109] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] volume-f6cad4b4-0a76-4bbc-8faf-8797e51710f7/volume-f6cad4b4-0a76-4bbc-8faf-8797e51710f7.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 890.967798] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-548c890d-85ea-4987-81ea-568c25f82d1d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.988415] env[61998]: DEBUG oslo_vmware.api [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){ [ 890.988415] env[61998]: value = "task-1388668" [ 890.988415] env[61998]: _type = "Task" [ 890.988415] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.998727] env[61998]: DEBUG oslo_vmware.api [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388668, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.379422] env[61998]: DEBUG nova.compute.manager [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 891.443893] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.711s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.444500] env[61998]: DEBUG nova.compute.manager [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Start building networks asynchronously for instance. 
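[annotation] The lockutils lines record two durations per lock: how long the caller waited to acquire it and how long it was held (e.g. "waited 0.805s" / "held 2.711s" around the compute_resources claim). A stripped-down sketch of that bookkeeping, not oslo_concurrency's real implementation:

```python
import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name):
    """Acquire a named lock and report waited/held times like lockutils."""
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    with lock:
        print('Lock "%s" acquired :: waited %.3fs'
              % (name, time.monotonic() - t0))
        t1 = time.monotonic()
        try:
            yield
        finally:
            print('Lock "%s" "released" :: held %.3fs'
                  % (name, time.monotonic() - t1))
```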
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 891.452522] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.111s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.454399] env[61998]: INFO nova.compute.claims [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 891.459672] env[61998]: INFO nova.compute.manager [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Took 29.66 seconds to build instance. [ 891.503171] env[61998]: DEBUG oslo_vmware.api [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388668, 'name': ReconfigVM_Task, 'duration_secs': 0.422856} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.504763] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Reconfigured VM instance instance-00000043 to attach disk [datastore1] volume-f6cad4b4-0a76-4bbc-8faf-8797e51710f7/volume-f6cad4b4-0a76-4bbc-8faf-8797e51710f7.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.509758] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b81ae785-7063-4cfb-a7c2-91481f46e626 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.527650] env[61998]: DEBUG oslo_vmware.api [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){ [ 891.527650] env[61998]: value = "task-1388669" [ 891.527650] env[61998]: _type = "Task" [ 891.527650] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.530306] env[61998]: DEBUG nova.compute.manager [req-ba41d224-2d65-4988-ab07-069886f1e284 req-a85e8cc7-a1c7-4784-bd7c-09a96ad6ce45 service nova] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Received event network-vif-plugged-47933687-23f9-4630-a0bb-9af926699f03 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 891.530589] env[61998]: DEBUG oslo_concurrency.lockutils [req-ba41d224-2d65-4988-ab07-069886f1e284 req-a85e8cc7-a1c7-4784-bd7c-09a96ad6ce45 service nova] Acquiring lock "2914460e-39e5-495b-96d8-b3580d0318d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.530846] env[61998]: DEBUG oslo_concurrency.lockutils [req-ba41d224-2d65-4988-ab07-069886f1e284 req-a85e8cc7-a1c7-4784-bd7c-09a96ad6ce45 service nova] Lock "2914460e-39e5-495b-96d8-b3580d0318d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.531053] env[61998]: DEBUG oslo_concurrency.lockutils [req-ba41d224-2d65-4988-ab07-069886f1e284 req-a85e8cc7-a1c7-4784-bd7c-09a96ad6ce45 service nova] Lock "2914460e-39e5-495b-96d8-b3580d0318d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.531233] env[61998]: DEBUG nova.compute.manager [req-ba41d224-2d65-4988-ab07-069886f1e284 req-a85e8cc7-a1c7-4784-bd7c-09a96ad6ce45 service nova] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] No waiting events found dispatching network-vif-plugged-47933687-23f9-4630-a0bb-9af926699f03 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 891.531585] env[61998]: WARNING nova.compute.manager [req-ba41d224-2d65-4988-ab07-069886f1e284 req-a85e8cc7-a1c7-4784-bd7c-09a96ad6ce45 service nova] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Received unexpected event network-vif-plugged-47933687-23f9-4630-a0bb-9af926699f03 for instance with vm_state building and task_state spawning. [ 891.543578] env[61998]: DEBUG oslo_vmware.api [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388669, 'name': ReconfigVM_Task} progress is 10%. 
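[annotation] The network-vif-plugged handling above follows a register-then-pop pattern: the spawn path registers the events it expects, and the Neutron-driven handler pops the matching waiter; with nothing registered yet, the event is logged as unexpected, exactly as here. A generic sketch of that registry (not nova.compute.manager's real InstanceEvents):

```python
import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, event_name):
        """Called before the operation that triggers the event."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev  # caller blocks on ev.wait(timeout)

    def pop(self, instance_uuid, event_name):
        """Called by the external-event handler when Neutron reports in."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('Received unexpected event %s' % event_name)
        else:
            ev.set()
```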
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.795765] env[61998]: DEBUG nova.network.neutron [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Successfully updated port: 47933687-23f9-4630-a0bb-9af926699f03 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 891.955724] env[61998]: DEBUG nova.compute.utils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 891.962024] env[61998]: DEBUG nova.compute.manager [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Not allocating networking since 'none' was specified. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 891.966020] env[61998]: DEBUG oslo_concurrency.lockutils [None req-1fa27a88-f41f-42b7-998a-1fccf81f3ebb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "23265b26-7579-4514-a172-8cf2ec124ec6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.975s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.040586] env[61998]: DEBUG oslo_vmware.api [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388669, 'name': ReconfigVM_Task, 'duration_secs': 0.164154} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.041018] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294747', 'volume_id': 'f6cad4b4-0a76-4bbc-8faf-8797e51710f7', 'name': 'volume-f6cad4b4-0a76-4bbc-8faf-8797e51710f7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dadd9985-bca3-4207-927f-9490e0ae3f10', 'attached_at': '', 'detached_at': '', 'volume_id': 'f6cad4b4-0a76-4bbc-8faf-8797e51710f7', 'serial': 'f6cad4b4-0a76-4bbc-8faf-8797e51710f7'} {{(pid=61998) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 892.300674] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquiring lock "refresh_cache-2914460e-39e5-495b-96d8-b3580d0318d6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.300674] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquired lock "refresh_cache-2914460e-39e5-495b-96d8-b3580d0318d6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.300674] env[61998]: DEBUG nova.network.neutron [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 892.461884] env[61998]: DEBUG nova.compute.manager [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Start building block device mappings for instance. 
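[annotation] The "Attached VMDK: {...}" dict is the Cinder connection_info for a vmdk-type volume. A sketch of the fields the attach path actually consumes from it, keyed off the structure logged above:

```python
def parse_vmdk_connection_info(connection_info):
    """Extract the attach-relevant fields from vmdk connection_info."""
    assert connection_info['driver_volume_type'] == 'vmdk'
    data = connection_info['data']
    return {
        'backing_ref': data['volume'],    # e.g. 'vm-294747' (shadow VM)
        'volume_id': data['volume_id'],
        'vmdk_name': data['name'],
        'read_only': data['access_mode'] != 'rw',
    }
```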
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 892.651492] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05f06d8-6b76-48ae-8408-7e3a238551f7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.660053] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b007cc-eb69-41ba-84f8-19d408f92e69 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.695425] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac7c5d8-ca03-4713-ab6e-131ebf6c0045 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.703692] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93f8f7d-fc39-47c6-ab9a-c132448631da {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.719161] env[61998]: DEBUG nova.compute.provider_tree [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.864645] env[61998]: DEBUG nova.network.neutron [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 893.087055] env[61998]: DEBUG nova.objects.instance [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lazy-loading 'flavor' on Instance uuid dadd9985-bca3-4207-927f-9490e0ae3f10 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.115078] env[61998]: DEBUG nova.network.neutron [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Updating instance_info_cache with network_info: [{"id": "47933687-23f9-4630-a0bb-9af926699f03", "address": "fa:16:3e:83:f2:ae", "network": {"id": "e38dbc59-9c0e-4746-bb7a-d49420e5ba42", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-482488625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8202a47486224771ae061f4787686ccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47933687-23", "ovs_interfaceid": "47933687-23f9-4630-a0bb-9af926699f03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.223770] env[61998]: DEBUG nova.scheduler.client.report [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 893.479059] env[61998]: DEBUG nova.compute.manager [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Start spawning the instance on the hypervisor. 
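[annotation] "Lazy-loading 'flavor' on Instance" means that field was not fetched with the object and is pulled from the database on first access. A generic __getattr__ sketch of the idea; nova.objects' real machinery (obj_load_attr) is considerably more involved, and the loader callable here is a placeholder:

```python
class LazyInstance:
    def __init__(self, uuid, loader):
        self.uuid = uuid
        self._loader = loader  # callable: (uuid, field) -> value

    def __getattr__(self, field):
        # Only invoked when normal attribute lookup fails.
        print("Lazy-loading '%s' on Instance uuid %s" % (field, self.uuid))
        value = self._loader(self.uuid, field)
        setattr(self, field, value)  # cache so the next access is direct
        return value
```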
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 893.592482] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb838cc-dce3-431b-a314-aab3fbb15d34 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.363s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.617824] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Releasing lock "refresh_cache-2914460e-39e5-495b-96d8-b3580d0318d6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.618218] env[61998]: DEBUG nova.compute.manager [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Instance network_info: |[{"id": "47933687-23f9-4630-a0bb-9af926699f03", "address": "fa:16:3e:83:f2:ae", "network": {"id": "e38dbc59-9c0e-4746-bb7a-d49420e5ba42", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-482488625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8202a47486224771ae061f4787686ccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47933687-23", "ovs_interfaceid": "47933687-23f9-4630-a0bb-9af926699f03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 893.730071] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.277s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.730845] env[61998]: DEBUG nova.compute.manager [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 893.985200] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "interface-c51f684b-84f0-42b3-acf9-9e8317b10cb6-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.985558] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-c51f684b-84f0-42b3-acf9-9e8317b10cb6-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.985986] env[61998]: DEBUG nova.objects.instance [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'flavor' on Instance uuid c51f684b-84f0-42b3-acf9-9e8317b10cb6 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 894.235807] env[61998]: DEBUG nova.compute.utils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 894.237507] env[61998]: DEBUG nova.compute.manager [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 894.237767] env[61998]: DEBUG nova.network.neutron [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 894.289366] env[61998]: DEBUG nova.policy [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '744da696f7c64f62ae04195aa737fab4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c75c9b7c8d6b441d80fe512c37c88679', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 894.564253] env[61998]: DEBUG nova.network.neutron [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Successfully created port: 9852375c-4fe3-4053-89ac-5a75e475ef56 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 894.579462] env[61998]: DEBUG nova.objects.instance [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'pci_requests' on Instance uuid c51f684b-84f0-42b3-acf9-9e8317b10cb6 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 894.741427] env[61998]: DEBUG nova.compute.manager [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Start building block device mappings for instance. 
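[annotation] The "Successfully created port" lines come from Nova asking Neutron for a port with just enough fields bound to the instance (_create_port_minimal). A hedged sketch of the equivalent request via python-neutronclient; the client construction is elided, `neutron` is assumed to be an authenticated Client, and both UUIDs are placeholders:

```python
body = {
    'port': {
        'network_id': 'NETWORK_UUID',      # placeholder
        'device_id': 'INSTANCE_UUID',      # placeholder
        'device_owner': 'compute:nova',
        'binding:vnic_type': 'normal',
    }
}
port = neutron.create_port(body)['port']   # port['id'] is what gets plugged
```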
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 895.080967] env[61998]: DEBUG nova.objects.base [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 895.081251] env[61998]: DEBUG nova.network.neutron [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 895.123695] env[61998]: DEBUG nova.policy [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3066202e35b643d1b6d3f2d8b4d724ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e479b6ac56f464fbc86574f776cd96c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 895.441348] env[61998]: DEBUG nova.network.neutron [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Successfully created port: 95b14cbf-3b14-4a08-a168-03339234265d {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.492717] env[61998]: DEBUG nova.virt.hardware [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 895.492985] env[61998]: DEBUG nova.virt.hardware [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 895.493178] env[61998]: DEBUG nova.virt.hardware [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Image limits 0:0:0 {{(pid=61998) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.493379] env[61998]: DEBUG nova.virt.hardware [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 895.493528] env[61998]: DEBUG nova.virt.hardware [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.493679] env[61998]: DEBUG nova.virt.hardware [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 895.493901] env[61998]: DEBUG nova.virt.hardware [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 895.494077] env[61998]: DEBUG nova.virt.hardware [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 895.494273] env[61998]: DEBUG nova.virt.hardware [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 895.494457] env[61998]: DEBUG nova.virt.hardware [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 895.494635] env[61998]: DEBUG nova.virt.hardware [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 895.497116] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ab7088-0b7e-4189-b952-f2da600dc184 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.508746] env[61998]: DEBUG nova.virt.hardware [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 895.508993] env[61998]: DEBUG nova.virt.hardware [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 895.509204] env[61998]: DEBUG nova.virt.hardware [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.509356] env[61998]: DEBUG nova.virt.hardware [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 895.509505] env[61998]: DEBUG nova.virt.hardware [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.509689] env[61998]: DEBUG nova.virt.hardware [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 895.509926] env[61998]: DEBUG nova.virt.hardware [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 895.510109] env[61998]: DEBUG nova.virt.hardware [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 895.510287] env[61998]: DEBUG nova.virt.hardware [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 895.510456] env[61998]: DEBUG nova.virt.hardware [None req-8d182e19-504b-41e3-9c29-254abb9c3516 
tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 895.510743] env[61998]: DEBUG nova.virt.hardware [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 895.512167] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8a7ad3-25d1-4bd6-b1ef-318f81a93813 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.518311] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172421b0-b1b2-4d07-9d72-f7e3a49fa345 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.531628] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:f2:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e028024-a9c1-4cae-8849-ea770a7ae0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47933687-23f9-4630-a0bb-9af926699f03', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 895.539236] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Creating folder: Project (8202a47486224771ae061f4787686ccb). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 895.541553] env[61998]: DEBUG oslo_vmware.rw_handles [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a1d65f-ade6-d4a7-1863-efbce7182068/disk-0.vmdk. 
{{(pid=61998) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 895.544157] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a43f1065-6957-45fe-8a8c-8e884c4c0f65 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.546370] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1d3b02-1f3e-475c-bdec-1038fe0aed65 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.550452] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0dfe690-de03-47c4-bf23-43f6ea16fd73 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.567249] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Instance VIF info [] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 895.571957] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Creating folder: Project (b9a7dc805d23461fb9abff39c2df6581). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 895.573039] env[61998]: DEBUG oslo_vmware.rw_handles [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a1d65f-ade6-d4a7-1863-efbce7182068/disk-0.vmdk is in state: ready. {{(pid=61998) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 895.573039] env[61998]: ERROR oslo_vmware.rw_handles [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a1d65f-ade6-d4a7-1863-efbce7182068/disk-0.vmdk due to incomplete transfer. [ 895.574036] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08d0988f-54db-47ee-a0fa-85690562d8e0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.575822] env[61998]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c5903d6f-9b48-4df3-8337-51d841a29fd2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.577346] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Created folder: Project (8202a47486224771ae061f4787686ccb) in parent group-v294665. [ 895.577891] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Creating folder: Instances. Parent ref: group-v294750. 
{{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 895.577891] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-421246d5-bb93-4fa4-98a3-34df81f32915 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.587170] env[61998]: DEBUG oslo_vmware.rw_handles [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a1d65f-ade6-d4a7-1863-efbce7182068/disk-0.vmdk. {{(pid=61998) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 895.587389] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Uploaded image 01fdcc19-75c0-4d14-9ab4-17361ee18eeb to the Glance image server {{(pid=61998) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 895.589129] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Destroying the VM {{(pid=61998) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 895.591834] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cfba407d-e18d-49da-85fe-c0007bc310c3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.593526] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Created folder: Instances in parent group-v294750. [ 895.593763] env[61998]: DEBUG oslo.service.loopingcall [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.593968] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Created folder: Project (b9a7dc805d23461fb9abff39c2df6581) in parent group-v294665. [ 895.594145] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Creating folder: Instances. Parent ref: group-v294751. 
{{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 895.594387] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 895.594879] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36407e3c-a6bb-43a6-9d0c-1811924966cf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.596488] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8993b75-9009-4847-92be-7b08870719b7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.614668] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 895.614668] env[61998]: value = "task-1388673" [ 895.614668] env[61998]: _type = "Task" [ 895.614668] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.620218] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 895.620218] env[61998]: value = "task-1388675" [ 895.620218] env[61998]: _type = "Task" [ 895.620218] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.627102] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Created folder: Instances in parent group-v294751. [ 895.627102] env[61998]: DEBUG oslo.service.loopingcall [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.634443] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 895.634443] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388673, 'name': Destroy_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.634443] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3bbb942-c62d-47e8-b4a3-32d309118c1a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.647205] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388675, 'name': CreateVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.652565] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 895.652565] env[61998]: value = "task-1388676" [ 895.652565] env[61998]: _type = "Task" [ 895.652565] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.664887] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388676, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.753464] env[61998]: DEBUG nova.compute.manager [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 895.783181] env[61998]: DEBUG nova.virt.hardware [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 895.783608] env[61998]: DEBUG nova.virt.hardware [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 895.783900] env[61998]: DEBUG nova.virt.hardware [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.784153] env[61998]: DEBUG nova.virt.hardware [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 895.784343] env[61998]: DEBUG nova.virt.hardware [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.784529] env[61998]: DEBUG nova.virt.hardware [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 895.785577] env[61998]: DEBUG nova.virt.hardware [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 895.785577] env[61998]: DEBUG nova.virt.hardware [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 895.785577] env[61998]: DEBUG nova.virt.hardware [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 895.785577] env[61998]: DEBUG nova.virt.hardware [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 895.785767] env[61998]: DEBUG nova.virt.hardware [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 895.788156] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8638c01-f7fb-49c1-9cce-e2dac03be03e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.793414] env[61998]: DEBUG nova.compute.manager [req-2d7abe8c-7d64-4b97-8ce1-59245550e577 req-5b275b9d-7a10-4f32-b3b6-3e92590941c2 service nova] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Received event network-changed-47933687-23f9-4630-a0bb-9af926699f03 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 895.793616] env[61998]: DEBUG nova.compute.manager [req-2d7abe8c-7d64-4b97-8ce1-59245550e577 req-5b275b9d-7a10-4f32-b3b6-3e92590941c2 service nova] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Refreshing instance network info cache due to event network-changed-47933687-23f9-4630-a0bb-9af926699f03. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 895.793839] env[61998]: DEBUG oslo_concurrency.lockutils [req-2d7abe8c-7d64-4b97-8ce1-59245550e577 req-5b275b9d-7a10-4f32-b3b6-3e92590941c2 service nova] Acquiring lock "refresh_cache-2914460e-39e5-495b-96d8-b3580d0318d6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.794065] env[61998]: DEBUG oslo_concurrency.lockutils [req-2d7abe8c-7d64-4b97-8ce1-59245550e577 req-5b275b9d-7a10-4f32-b3b6-3e92590941c2 service nova] Acquired lock "refresh_cache-2914460e-39e5-495b-96d8-b3580d0318d6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.794180] env[61998]: DEBUG nova.network.neutron [req-2d7abe8c-7d64-4b97-8ce1-59245550e577 req-5b275b9d-7a10-4f32-b3b6-3e92590941c2 service nova] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Refreshing network info cache for port 47933687-23f9-4630-a0bb-9af926699f03 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 895.801783] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cc94bb-e631-4ad1-9680-f72e888ff5cd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.086752] env[61998]: INFO nova.compute.manager [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Rebuilding instance [ 896.136731] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388673, 'name': Destroy_Task, 'duration_secs': 0.456423} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.137409] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Destroyed the VM [ 896.137734] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Deleting Snapshot of the VM instance {{(pid=61998) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 896.138939] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ba3622f4-1105-4ffb-a506-2efe8b1d1c19 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.145694] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388675, 'name': CreateVM_Task, 'duration_secs': 0.426434} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.146353] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 896.147655] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.147874] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.148232] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 896.150901] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08f8fda5-2604-4414-982d-40efce086787 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.156186] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 896.156186] env[61998]: value = "task-1388677" [ 896.156186] env[61998]: _type = "Task" [ 896.156186] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.156650] env[61998]: DEBUG nova.compute.manager [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 896.162204] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f969e26-aa62-4133-bf1b-90b3003f1e28 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.169261] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Waiting for the task: (returnval){ [ 896.169261] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52b14373-ee58-c421-2aaa-4949684df418" [ 896.169261] env[61998]: _type = "Task" [ 896.169261] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.179684] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388676, 'name': CreateVM_Task, 'duration_secs': 0.330545} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.182349] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388677, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.183383] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 896.183889] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.188194] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b14373-ee58-c421-2aaa-4949684df418, 'name': SearchDatastore_Task, 'duration_secs': 0.008996} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.188481] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.188801] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 896.188932] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.189095] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.189287] env[61998]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 896.189574] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.189881] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 896.190184] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1f61f4c-4b96-4bd2-90f7-318fa8676378 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.192079] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88935f6f-a709-43cc-9958-17e0d09f1ad4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.199468] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 896.199468] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5221195a-2cbf-ab3e-21ad-cd111ea4c8ac" [ 896.199468] env[61998]: _type = "Task" [ 896.199468] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.203500] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 896.203686] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 896.204718] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e3e72e4-f6bc-4581-b87a-c86abeee042d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.211047] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5221195a-2cbf-ab3e-21ad-cd111ea4c8ac, 'name': SearchDatastore_Task, 'duration_secs': 0.00893} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.211798] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.212044] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 896.212309] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.214480] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Waiting for the task: (returnval){ [ 896.214480] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52522695-a568-0d47-94b8-0c1b45a1980c" [ 896.214480] env[61998]: _type = "Task" [ 896.214480] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.222883] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52522695-a568-0d47-94b8-0c1b45a1980c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.261867] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "dadd9985-bca3-4207-927f-9490e0ae3f10" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.261867] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.493148] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 896.493633] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a83c7c7-0146-432f-bd8b-7bbcc89eab7e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.499933] env[61998]: DEBUG nova.network.neutron [req-2d7abe8c-7d64-4b97-8ce1-59245550e577 req-5b275b9d-7a10-4f32-b3b6-3e92590941c2 service nova] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Updated VIF entry in instance network info cache for port 47933687-23f9-4630-a0bb-9af926699f03. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 896.500296] env[61998]: DEBUG nova.network.neutron [req-2d7abe8c-7d64-4b97-8ce1-59245550e577 req-5b275b9d-7a10-4f32-b3b6-3e92590941c2 service nova] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Updating instance_info_cache with network_info: [{"id": "47933687-23f9-4630-a0bb-9af926699f03", "address": "fa:16:3e:83:f2:ae", "network": {"id": "e38dbc59-9c0e-4746-bb7a-d49420e5ba42", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-482488625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8202a47486224771ae061f4787686ccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47933687-23", "ovs_interfaceid": "47933687-23f9-4630-a0bb-9af926699f03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.502716] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 896.502716] env[61998]: value = "task-1388678" [ 896.502716] env[61998]: _type = "Task" [ 896.502716] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.511780] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388678, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.604190] env[61998]: DEBUG nova.network.neutron [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Successfully updated port: 9852375c-4fe3-4053-89ac-5a75e475ef56 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.668085] env[61998]: DEBUG nova.compute.manager [req-100d650a-39fd-4325-8ea0-c54ad38c93ed req-a43f04de-6bbd-47c9-90ee-f55afe52d81c service nova] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Received event network-vif-plugged-9852375c-4fe3-4053-89ac-5a75e475ef56 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 896.668225] env[61998]: DEBUG oslo_concurrency.lockutils [req-100d650a-39fd-4325-8ea0-c54ad38c93ed req-a43f04de-6bbd-47c9-90ee-f55afe52d81c service nova] Acquiring lock "6e71b3c4-bac7-455c-94fd-2a9bc5128132-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.668441] env[61998]: DEBUG oslo_concurrency.lockutils [req-100d650a-39fd-4325-8ea0-c54ad38c93ed req-a43f04de-6bbd-47c9-90ee-f55afe52d81c service nova] Lock "6e71b3c4-bac7-455c-94fd-2a9bc5128132-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.668616] env[61998]: DEBUG oslo_concurrency.lockutils [req-100d650a-39fd-4325-8ea0-c54ad38c93ed req-a43f04de-6bbd-47c9-90ee-f55afe52d81c service nova] Lock "6e71b3c4-bac7-455c-94fd-2a9bc5128132-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.668787] env[61998]: DEBUG nova.compute.manager [req-100d650a-39fd-4325-8ea0-c54ad38c93ed req-a43f04de-6bbd-47c9-90ee-f55afe52d81c service nova] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] No waiting events found dispatching network-vif-plugged-9852375c-4fe3-4053-89ac-5a75e475ef56 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 896.668956] env[61998]: WARNING nova.compute.manager [req-100d650a-39fd-4325-8ea0-c54ad38c93ed req-a43f04de-6bbd-47c9-90ee-f55afe52d81c service nova] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Received unexpected event network-vif-plugged-9852375c-4fe3-4053-89ac-5a75e475ef56 for instance with vm_state building and task_state spawning. [ 896.675581] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388677, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.724612] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52522695-a568-0d47-94b8-0c1b45a1980c, 'name': SearchDatastore_Task, 'duration_secs': 0.008069} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.725806] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e216be5-ecec-434e-b0ea-b269c7a8c501 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.731127] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Waiting for the task: (returnval){ [ 896.731127] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52dd78be-a608-8435-7aeb-293be3be29a7" [ 896.731127] env[61998]: _type = "Task" [ 896.731127] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.739070] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52dd78be-a608-8435-7aeb-293be3be29a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.764701] env[61998]: INFO nova.compute.manager [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Detaching volume f6cad4b4-0a76-4bbc-8faf-8797e51710f7 [ 896.796425] env[61998]: INFO nova.virt.block_device [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Attempting to driver detach volume f6cad4b4-0a76-4bbc-8faf-8797e51710f7 from mountpoint /dev/sdb [ 896.796677] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Volume detach. 
Driver type: vmdk {{(pid=61998) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 896.796993] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294747', 'volume_id': 'f6cad4b4-0a76-4bbc-8faf-8797e51710f7', 'name': 'volume-f6cad4b4-0a76-4bbc-8faf-8797e51710f7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dadd9985-bca3-4207-927f-9490e0ae3f10', 'attached_at': '', 'detached_at': '', 'volume_id': 'f6cad4b4-0a76-4bbc-8faf-8797e51710f7', 'serial': 'f6cad4b4-0a76-4bbc-8faf-8797e51710f7'} {{(pid=61998) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 896.797755] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fd4bcb-5b53-456b-aec4-d36dc9e97887 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.819963] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cb3fe4-7c90-4146-a6fb-aa7c85a1aa8f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.828119] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f722f7-8065-4047-b9cb-a0f860e71834 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.850635] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4512732-1fd1-4e90-9567-db7199da3faa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.865007] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] The volume has not been displaced from its original location: [datastore1] volume-f6cad4b4-0a76-4bbc-8faf-8797e51710f7/volume-f6cad4b4-0a76-4bbc-8faf-8797e51710f7.vmdk. No consolidation needed. {{(pid=61998) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 896.870273] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Reconfiguring VM instance instance-00000043 to detach disk 2001 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 896.872303] env[61998]: DEBUG oslo_vmware.rw_handles [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5284c69b-8312-72b1-d020-17bf3b72924c/disk-0.vmdk. 
{{(pid=61998) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 896.873474] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57c715e3-b650-4377-b952-807321bbe1cb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.887072] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff32247-88d9-4171-a141-531a38e9dd0c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.890849] env[61998]: DEBUG nova.compute.manager [req-c88a7845-595d-4b90-9e47-a77543af6a65 req-9fbcb76b-8181-4d29-8e13-96e15f69fec1 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Received event network-vif-plugged-95b14cbf-3b14-4a08-a168-03339234265d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 896.891317] env[61998]: DEBUG oslo_concurrency.lockutils [req-c88a7845-595d-4b90-9e47-a77543af6a65 req-9fbcb76b-8181-4d29-8e13-96e15f69fec1 service nova] Acquiring lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.891537] env[61998]: DEBUG oslo_concurrency.lockutils [req-c88a7845-595d-4b90-9e47-a77543af6a65 req-9fbcb76b-8181-4d29-8e13-96e15f69fec1 service nova] Lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.891712] env[61998]: DEBUG oslo_concurrency.lockutils [req-c88a7845-595d-4b90-9e47-a77543af6a65 req-9fbcb76b-8181-4d29-8e13-96e15f69fec1 service nova] Lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.892152] env[61998]: DEBUG nova.compute.manager [req-c88a7845-595d-4b90-9e47-a77543af6a65 req-9fbcb76b-8181-4d29-8e13-96e15f69fec1 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] No waiting events found dispatching network-vif-plugged-95b14cbf-3b14-4a08-a168-03339234265d {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 896.892336] env[61998]: WARNING nova.compute.manager [req-c88a7845-595d-4b90-9e47-a77543af6a65 req-9fbcb76b-8181-4d29-8e13-96e15f69fec1 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Received unexpected event network-vif-plugged-95b14cbf-3b14-4a08-a168-03339234265d for instance with vm_state active and task_state None. [ 896.897306] env[61998]: DEBUG oslo_vmware.rw_handles [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5284c69b-8312-72b1-d020-17bf3b72924c/disk-0.vmdk is in state: ready. 
{{(pid=61998) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 896.897518] env[61998]: ERROR oslo_vmware.rw_handles [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5284c69b-8312-72b1-d020-17bf3b72924c/disk-0.vmdk due to incomplete transfer. [ 896.898666] env[61998]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d77f0853-116d-4dc2-97c6-b0d802fef27f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.900118] env[61998]: DEBUG oslo_vmware.api [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){ [ 896.900118] env[61998]: value = "task-1388679" [ 896.900118] env[61998]: _type = "Task" [ 896.900118] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.905562] env[61998]: DEBUG oslo_vmware.rw_handles [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5284c69b-8312-72b1-d020-17bf3b72924c/disk-0.vmdk. {{(pid=61998) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 896.905746] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Uploaded image 45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af to the Glance image server {{(pid=61998) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 896.908357] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Destroying the VM {{(pid=61998) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 896.911790] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2aaea746-526a-4a61-b6a0-2981239093f9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.913125] env[61998]: DEBUG oslo_vmware.api [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388679, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.918039] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 896.918039] env[61998]: value = "task-1388680" [ 896.918039] env[61998]: _type = "Task" [ 896.918039] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.930361] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388680, 'name': Destroy_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.954277] env[61998]: DEBUG nova.network.neutron [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Successfully updated port: 95b14cbf-3b14-4a08-a168-03339234265d {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 897.004550] env[61998]: DEBUG oslo_concurrency.lockutils [req-2d7abe8c-7d64-4b97-8ce1-59245550e577 req-5b275b9d-7a10-4f32-b3b6-3e92590941c2 service nova] Releasing lock "refresh_cache-2914460e-39e5-495b-96d8-b3580d0318d6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.013358] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388678, 'name': PowerOffVM_Task, 'duration_secs': 0.22822} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.013607] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 897.014384] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cbb312-d378-4898-b7cf-8ed3a4993272 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.034633] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019caca3-00dd-41f0-ab1d-18072ffe01fe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.068085] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 897.068436] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bff5647b-fc04-4740-9bcc-c613ff8b058b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.075464] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 897.075464] env[61998]: value = "task-1388681" [ 897.075464] env[61998]: _type = "Task" [ 897.075464] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.084518] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] VM already powered off {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 897.084725] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 897.084956] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.107215] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "refresh_cache-6e71b3c4-bac7-455c-94fd-2a9bc5128132" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.107429] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "refresh_cache-6e71b3c4-bac7-455c-94fd-2a9bc5128132" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.107589] env[61998]: DEBUG nova.network.neutron [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.173235] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388677, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.187459] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 897.187820] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b23a9d3b-f7f8-4b7d-a9d3-27077c2c1f26 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.195113] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 897.195113] env[61998]: value = "task-1388682" [ 897.195113] env[61998]: _type = "Task" [ 897.195113] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.205177] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388682, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.241933] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52dd78be-a608-8435-7aeb-293be3be29a7, 'name': SearchDatastore_Task, 'duration_secs': 0.00924} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.242122] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.242309] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 2914460e-39e5-495b-96d8-b3580d0318d6/2914460e-39e5-495b-96d8-b3580d0318d6.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 897.242667] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.243037] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 897.243226] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ac58223-27d9-47e4-b997-aa0d6bc9d6a3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.245517] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf99d8c6-4d72-43d5-b8c5-06e8d91d0d8d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.253200] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Waiting for the task: (returnval){ [ 897.253200] env[61998]: value = "task-1388683" [ 897.253200] env[61998]: _type = "Task" [ 897.253200] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.257398] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.257589] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 897.258678] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72fa3b43-9d84-47e8-91a3-b333634039c8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.264048] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388683, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.267441] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 897.267441] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]529cf415-35be-c5f2-da1c-7c111485148d" [ 897.267441] env[61998]: _type = "Task" [ 897.267441] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.275467] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529cf415-35be-c5f2-da1c-7c111485148d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.410605] env[61998]: DEBUG oslo_vmware.api [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388679, 'name': ReconfigVM_Task, 'duration_secs': 0.232092} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.410916] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Reconfigured VM instance instance-00000043 to detach disk 2001 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 897.415887] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d03ab3db-076b-4662-9f07-1e117844600c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.435298] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388680, 'name': Destroy_Task} progress is 33%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.436750] env[61998]: DEBUG oslo_vmware.api [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){ [ 897.436750] env[61998]: value = "task-1388684" [ 897.436750] env[61998]: _type = "Task" [ 897.436750] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.444805] env[61998]: DEBUG oslo_vmware.api [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388684, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.456769] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.456986] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.457344] env[61998]: DEBUG nova.network.neutron [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.646817] env[61998]: DEBUG nova.network.neutron [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.675103] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388677, 'name': RemoveSnapshot_Task, 'duration_secs': 1.24305} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.675474] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Deleted Snapshot of the VM instance {{(pid=61998) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 897.675773] env[61998]: DEBUG nova.compute.manager [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 897.676785] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbb0564-cdce-42c2-9c07-f12762432e2b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.706159] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388682, 'name': PowerOffVM_Task, 'duration_secs': 0.194736} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.706682] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 897.706724] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 897.707642] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aec5e4a-ed50-4f20-ace0-df5f1910e5da {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.717714] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 897.718073] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77709e41-da84-4e3d-a6f9-730a0af22eb9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.765595] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493648} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.765863] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 2914460e-39e5-495b-96d8-b3580d0318d6/2914460e-39e5-495b-96d8-b3580d0318d6.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 897.766092] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 897.766365] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1312b387-3c33-4159-9d35-163f614451a4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.774982] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Waiting for the task: (returnval){ [ 897.774982] env[61998]: value = "task-1388686" [ 897.774982] env[61998]: _type = "Task" [ 897.774982] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.778151] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529cf415-35be-c5f2-da1c-7c111485148d, 'name': SearchDatastore_Task, 'duration_secs': 0.019821} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.781473] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b3a0d52-4a05-4ec1-ace4-0a6ba9799eaa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.788261] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 897.788261] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52a57d15-8c3f-d7c1-306e-28a1c4cfff1a" [ 897.788261] env[61998]: _type = "Task" [ 897.788261] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.791350] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388686, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.799305] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52a57d15-8c3f-d7c1-306e-28a1c4cfff1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.804164] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 897.804164] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 897.804164] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleting the datastore file [datastore1] 23265b26-7579-4514-a172-8cf2ec124ec6 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 897.804164] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9c51a12-4900-424a-a532-685b0df0c49d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.809472] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 897.809472] env[61998]: value = "task-1388687" [ 897.809472] env[61998]: _type = "Task" [ 897.809472] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.818496] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388687, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.865624] env[61998]: DEBUG nova.network.neutron [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Updating instance_info_cache with network_info: [{"id": "9852375c-4fe3-4053-89ac-5a75e475ef56", "address": "fa:16:3e:f3:fd:9f", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9852375c-4f", "ovs_interfaceid": "9852375c-4fe3-4053-89ac-5a75e475ef56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.873955] env[61998]: DEBUG nova.compute.manager [req-57a8b1e4-bd1e-474a-a8b8-b158978d4427 req-4a964fc7-c49d-460b-889a-66c823c162d6 service nova] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Received event network-changed-9852375c-4fe3-4053-89ac-5a75e475ef56 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 897.874979] env[61998]: DEBUG nova.compute.manager [req-57a8b1e4-bd1e-474a-a8b8-b158978d4427 req-4a964fc7-c49d-460b-889a-66c823c162d6 service nova] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Refreshing instance network info cache due to event network-changed-9852375c-4fe3-4053-89ac-5a75e475ef56. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 897.874979] env[61998]: DEBUG oslo_concurrency.lockutils [req-57a8b1e4-bd1e-474a-a8b8-b158978d4427 req-4a964fc7-c49d-460b-889a-66c823c162d6 service nova] Acquiring lock "refresh_cache-6e71b3c4-bac7-455c-94fd-2a9bc5128132" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.935679] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388680, 'name': Destroy_Task, 'duration_secs': 0.566337} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.935954] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Destroyed the VM [ 897.936248] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Deleting Snapshot of the VM instance {{(pid=61998) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 897.936519] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d297c039-c2e2-4c64-bd87-5c66133beaea {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.946350] env[61998]: DEBUG oslo_vmware.api [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388684, 'name': ReconfigVM_Task, 'duration_secs': 0.150486} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.947456] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294747', 'volume_id': 'f6cad4b4-0a76-4bbc-8faf-8797e51710f7', 'name': 'volume-f6cad4b4-0a76-4bbc-8faf-8797e51710f7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'dadd9985-bca3-4207-927f-9490e0ae3f10', 'attached_at': '', 'detached_at': '', 'volume_id': 'f6cad4b4-0a76-4bbc-8faf-8797e51710f7', 'serial': 'f6cad4b4-0a76-4bbc-8faf-8797e51710f7'} {{(pid=61998) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 897.949949] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 897.949949] env[61998]: value = "task-1388688" [ 897.949949] env[61998]: _type = "Task" [ 897.949949] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.958290] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388688, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.991683] env[61998]: WARNING nova.network.neutron [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] 9b8c99a8-8481-43b6-bb09-1739b4f749c3 already exists in list: networks containing: ['9b8c99a8-8481-43b6-bb09-1739b4f749c3']. 
ignoring it [ 898.191616] env[61998]: INFO nova.compute.manager [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Shelve offloading [ 898.288029] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388686, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066628} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.288321] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 898.289128] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35c6b7a-09e2-4373-a823-5926c6c90458 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.318059] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 2914460e-39e5-495b-96d8-b3580d0318d6/2914460e-39e5-495b-96d8-b3580d0318d6.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 898.321718] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d3958d2-1fbd-40cc-b971-0ac46ac4c13f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.336421] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52a57d15-8c3f-d7c1-306e-28a1c4cfff1a, 'name': SearchDatastore_Task, 'duration_secs': 0.009781} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.340168] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.340457] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] aaee1558-f98b-4006-93b6-69434c78e79c/aaee1558-f98b-4006-93b6-69434c78e79c.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 898.341184] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.341397] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 898.341643] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e6970f6-63b8-40a9-98c3-6b5c1f57f00f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.343907] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ac4f441-438a-4825-97cf-917f174a4baa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.350148] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Waiting for the task: (returnval){ [ 898.350148] env[61998]: value = "task-1388689" [ 898.350148] env[61998]: _type = "Task" [ 898.350148] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.350148] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388687, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289402} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.350560] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 898.350822] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 898.351059] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 898.359776] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 898.359776] env[61998]: value = "task-1388690" [ 898.359776] env[61998]: _type = "Task" [ 898.359776] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.361447] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 898.361676] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 898.368985] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2a32cb6-b4e2-4807-bd94-c314efba8a3f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.375037] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "refresh_cache-6e71b3c4-bac7-455c-94fd-2a9bc5128132" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.375432] env[61998]: DEBUG nova.compute.manager [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Instance network_info: |[{"id": "9852375c-4fe3-4053-89ac-5a75e475ef56", "address": "fa:16:3e:f3:fd:9f", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9852375c-4f", "ovs_interfaceid": "9852375c-4fe3-4053-89ac-5a75e475ef56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 898.375798] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388689, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.376150] env[61998]: DEBUG oslo_concurrency.lockutils [req-57a8b1e4-bd1e-474a-a8b8-b158978d4427 req-4a964fc7-c49d-460b-889a-66c823c162d6 service nova] Acquired lock "refresh_cache-6e71b3c4-bac7-455c-94fd-2a9bc5128132" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.376386] env[61998]: DEBUG nova.network.neutron [req-57a8b1e4-bd1e-474a-a8b8-b158978d4427 req-4a964fc7-c49d-460b-889a-66c823c162d6 service nova] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Refreshing network info cache for port 9852375c-4fe3-4053-89ac-5a75e475ef56 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.377828] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:fd:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9852375c-4fe3-4053-89ac-5a75e475ef56', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.386699] env[61998]: DEBUG oslo.service.loopingcall [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.391647] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 898.396921] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56f3e31c-5b94-4e74-b8ce-58a6b7c81d17 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.411269] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 898.411269] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]529e2e70-b9c1-99bd-f2a6-07724e06925f" [ 898.411269] env[61998]: _type = "Task" [ 898.411269] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.411850] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388690, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.412941] env[61998]: DEBUG nova.network.neutron [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Updating instance_info_cache with network_info: [{"id": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "address": "fa:16:3e:15:ac:f5", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9f140c3-24", "ovs_interfaceid": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "95b14cbf-3b14-4a08-a168-03339234265d", "address": "fa:16:3e:df:07:14", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95b14cbf-3b", "ovs_interfaceid": "95b14cbf-3b14-4a08-a168-03339234265d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.419175] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.419175] env[61998]: value = "task-1388691" [ 898.419175] env[61998]: _type = "Task" [ 898.419175] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.426381] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529e2e70-b9c1-99bd-f2a6-07724e06925f, 'name': SearchDatastore_Task, 'duration_secs': 0.025068} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.427925] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-750b9b61-9951-4a00-93c1-ecb52bfe7923 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.433456] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388691, 'name': CreateVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.437208] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 898.437208] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]523a1143-801f-c4eb-6175-d40612411d28" [ 898.437208] env[61998]: _type = "Task" [ 898.437208] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.446672] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523a1143-801f-c4eb-6175-d40612411d28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.461911] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388688, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.497366] env[61998]: DEBUG nova.objects.instance [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lazy-loading 'flavor' on Instance uuid dadd9985-bca3-4207-927f-9490e0ae3f10 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.638916] env[61998]: DEBUG nova.network.neutron [req-57a8b1e4-bd1e-474a-a8b8-b158978d4427 req-4a964fc7-c49d-460b-889a-66c823c162d6 service nova] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Updated VIF entry in instance network info cache for port 9852375c-4fe3-4053-89ac-5a75e475ef56. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 898.639364] env[61998]: DEBUG nova.network.neutron [req-57a8b1e4-bd1e-474a-a8b8-b158978d4427 req-4a964fc7-c49d-460b-889a-66c823c162d6 service nova] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Updating instance_info_cache with network_info: [{"id": "9852375c-4fe3-4053-89ac-5a75e475ef56", "address": "fa:16:3e:f3:fd:9f", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9852375c-4f", "ovs_interfaceid": "9852375c-4fe3-4053-89ac-5a75e475ef56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.695371] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 898.695878] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b5fe4d8-393c-437b-9b33-6f9e614663a4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.704354] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 898.704354] env[61998]: value = "task-1388692" [ 898.704354] env[61998]: _type = "Task" [ 898.704354] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.716354] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] VM already powered off {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 898.716727] env[61998]: DEBUG nova.compute.manager [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 898.717750] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f00fee-c2b8-463b-96f9-ab83e3cb0b3a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.725112] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "refresh_cache-4c15a9f8-4dc2-48e1-a697-03298adb8527" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.725452] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "refresh_cache-4c15a9f8-4dc2-48e1-a697-03298adb8527" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.725661] env[61998]: DEBUG nova.network.neutron [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.864451] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388689, 'name': ReconfigVM_Task, 'duration_secs': 0.391992} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.865352] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 2914460e-39e5-495b-96d8-b3580d0318d6/2914460e-39e5-495b-96d8-b3580d0318d6.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.869135] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d50a0b2-9b61-4d5c-92e8-73ab3560d99f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.876369] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388690, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.877988] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Waiting for the task: (returnval){ [ 898.877988] env[61998]: value = "task-1388693" [ 898.877988] env[61998]: _type = "Task" [ 898.877988] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.886128] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388693, 'name': Rename_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.919380] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.920061] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.920230] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.921181] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d957b8b-9c52-42de-af40-be030358999d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.934404] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388691, 'name': CreateVM_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.948167] env[61998]: DEBUG nova.virt.hardware [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.948553] env[61998]: DEBUG nova.virt.hardware [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.948735] env[61998]: DEBUG nova.virt.hardware [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.949073] env[61998]: DEBUG nova.virt.hardware [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.949281] env[61998]: DEBUG 
nova.virt.hardware [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.949451] env[61998]: DEBUG nova.virt.hardware [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.949698] env[61998]: DEBUG nova.virt.hardware [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 898.949887] env[61998]: DEBUG nova.virt.hardware [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 898.950102] env[61998]: DEBUG nova.virt.hardware [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.950279] env[61998]: DEBUG nova.virt.hardware [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.950456] env[61998]: DEBUG nova.virt.hardware [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.957647] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Reconfiguring VM to attach interface {{(pid=61998) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 898.961440] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84442013-1376-45c4-8119-a74018884f93 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.986970] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523a1143-801f-c4eb-6175-d40612411d28, 'name': SearchDatastore_Task, 'duration_secs': 0.015859} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.987261] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388688, 'name': RemoveSnapshot_Task, 'duration_secs': 0.582688} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.988661] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.988955] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] b9c5feec-7bfd-470e-9833-b45403195e83/a90c4a31-8bcc-48cf-ada7-7369ab14c460-rescue.vmdk. {{(pid=61998) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 898.989287] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Deleted Snapshot of the VM instance {{(pid=61998) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 898.989568] env[61998]: DEBUG nova.compute.manager [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 898.989886] env[61998]: DEBUG oslo_vmware.api [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 898.989886] env[61998]: value = "task-1388694" [ 898.989886] env[61998]: _type = "Task" [ 898.989886] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.990111] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-893828d1-beca-466c-b57d-4e90232d992a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.992845] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd076f4-07ad-47fc-b21d-9c4655f5728b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.004246] env[61998]: DEBUG nova.compute.manager [req-4b38af22-9e6b-4c39-8565-fffe03b1fe34 req-4336fa5d-3601-4fc4-ac04-a22668cbba72 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Received event network-changed-95b14cbf-3b14-4a08-a168-03339234265d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 899.004452] env[61998]: DEBUG nova.compute.manager [req-4b38af22-9e6b-4c39-8565-fffe03b1fe34 req-4336fa5d-3601-4fc4-ac04-a22668cbba72 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Refreshing instance network info cache due to event network-changed-95b14cbf-3b14-4a08-a168-03339234265d. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 899.004674] env[61998]: DEBUG oslo_concurrency.lockutils [req-4b38af22-9e6b-4c39-8565-fffe03b1fe34 req-4336fa5d-3601-4fc4-ac04-a22668cbba72 service nova] Acquiring lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.004822] env[61998]: DEBUG oslo_concurrency.lockutils [req-4b38af22-9e6b-4c39-8565-fffe03b1fe34 req-4336fa5d-3601-4fc4-ac04-a22668cbba72 service nova] Acquired lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.004986] env[61998]: DEBUG nova.network.neutron [req-4b38af22-9e6b-4c39-8565-fffe03b1fe34 req-4336fa5d-3601-4fc4-ac04-a22668cbba72 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Refreshing network info cache for port 95b14cbf-3b14-4a08-a168-03339234265d {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 899.013093] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 899.013093] env[61998]: value = "task-1388695" [ 899.013093] env[61998]: _type = "Task" [ 899.013093] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.013432] env[61998]: DEBUG oslo_vmware.api [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388694, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.029692] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388695, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.142932] env[61998]: DEBUG oslo_concurrency.lockutils [req-57a8b1e4-bd1e-474a-a8b8-b158978d4427 req-4a964fc7-c49d-460b-889a-66c823c162d6 service nova] Releasing lock "refresh_cache-6e71b3c4-bac7-455c-94fd-2a9bc5128132" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.215829] env[61998]: DEBUG nova.network.neutron [req-4b38af22-9e6b-4c39-8565-fffe03b1fe34 req-4336fa5d-3601-4fc4-ac04-a22668cbba72 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Updated VIF entry in instance network info cache for port 95b14cbf-3b14-4a08-a168-03339234265d. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 899.216318] env[61998]: DEBUG nova.network.neutron [req-4b38af22-9e6b-4c39-8565-fffe03b1fe34 req-4336fa5d-3601-4fc4-ac04-a22668cbba72 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Updating instance_info_cache with network_info: [{"id": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "address": "fa:16:3e:15:ac:f5", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9f140c3-24", "ovs_interfaceid": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "95b14cbf-3b14-4a08-a168-03339234265d", "address": "fa:16:3e:df:07:14", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95b14cbf-3b", "ovs_interfaceid": "95b14cbf-3b14-4a08-a168-03339234265d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 899.380175] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.709236} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.383488] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] aaee1558-f98b-4006-93b6-69434c78e79c/aaee1558-f98b-4006-93b6-69434c78e79c.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 899.383722] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.385935] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3150d8b0-c341-4a9a-a3e5-354e5a3c2a7f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.394783] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388693, 'name': Rename_Task, 'duration_secs': 0.171449} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.398294] env[61998]: DEBUG nova.virt.hardware [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 899.398563] env[61998]: DEBUG nova.virt.hardware [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 899.398706] env[61998]: DEBUG nova.virt.hardware [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 899.398941] env[61998]: DEBUG nova.virt.hardware [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 899.399131] env[61998]: DEBUG nova.virt.hardware [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 899.399308] env[61998]: DEBUG nova.virt.hardware [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 899.399566] env[61998]: DEBUG nova.virt.hardware [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 899.399737] env[61998]: DEBUG nova.virt.hardware [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 899.399915] env[61998]: 
DEBUG nova.virt.hardware [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 899.400123] env[61998]: DEBUG nova.virt.hardware [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 899.400385] env[61998]: DEBUG nova.virt.hardware [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 899.400782] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 899.401088] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 899.401088] env[61998]: value = "task-1388696" [ 899.401088] env[61998]: _type = "Task" [ 899.401088] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.401848] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98043bf-0eaf-4859-91c5-d41ccbc68609 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.404620] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-638db207-f1b6-4479-8e02-7cac29a8f360 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.419336] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388696, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.422678] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee0b1c1-adbf-4656-944c-98b23347756d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.429075] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Waiting for the task: (returnval){ [ 899.429075] env[61998]: value = "task-1388697" [ 899.429075] env[61998]: _type = "Task" [ 899.429075] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.430397] env[61998]: DEBUG nova.network.neutron [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Updating instance_info_cache with network_info: [{"id": "089c550f-d232-4727-b576-df921335d3e4", "address": "fa:16:3e:93:ad:b8", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap089c550f-d2", "ovs_interfaceid": "089c550f-d232-4727-b576-df921335d3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.455865] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:ef:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1cb05d6f-fb33-4e35-a7a7-862b3b11b653', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 899.463574] env[61998]: DEBUG oslo.service.loopingcall [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 899.467950] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 899.468222] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388691, 'name': CreateVM_Task, 'duration_secs': 0.778373} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.468427] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0982ddd7-f3cf-40a8-926d-86e6b6c77d36 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.484381] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.487307] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.487497] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.487850] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 899.488241] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388697, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.488823] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b594f332-cf9a-4cb0-969e-360f32d2bcc3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.493873] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 899.493873] env[61998]: value = "task-1388698" [ 899.493873] env[61998]: _type = "Task" [ 899.493873] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.495333] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 899.495333] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5254f1fc-ba27-7e28-8cc5-655a4188fdc9" [ 899.495333] env[61998]: _type = "Task" [ 899.495333] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.509926] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388698, 'name': CreateVM_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.516623] env[61998]: DEBUG oslo_vmware.api [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388694, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.516871] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5254f1fc-ba27-7e28-8cc5-655a4188fdc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.517244] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7b31ec6e-5b3d-4bef-a5c6-5ecde43979b1 tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.255s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.525158] env[61998]: INFO nova.compute.manager [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Shelve offloading [ 899.532279] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388695, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.720106] env[61998]: DEBUG oslo_concurrency.lockutils [req-4b38af22-9e6b-4c39-8565-fffe03b1fe34 req-4336fa5d-3601-4fc4-ac04-a22668cbba72 service nova] Releasing lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.917064] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388696, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.257124} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.917064] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 899.917371] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9c4847-e1d5-4f55-8b08-53dd3be10746 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.936676] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] aaee1558-f98b-4006-93b6-69434c78e79c/aaee1558-f98b-4006-93b6-69434c78e79c.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 899.936942] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39585c86-4cd3-494f-8fd1-0cc92cbe2e5e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.951918] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "refresh_cache-4c15a9f8-4dc2-48e1-a697-03298adb8527" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.962590] env[61998]: DEBUG oslo_vmware.api [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388697, 'name': PowerOnVM_Task, 'duration_secs': 0.481692} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.964088] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.964088] env[61998]: INFO nova.compute.manager [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Took 8.58 seconds to spawn the instance on the hypervisor. 
[ 899.964545] env[61998]: DEBUG nova.compute.manager [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 899.964545] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 899.964545] env[61998]: value = "task-1388699" [ 899.964545] env[61998]: _type = "Task" [ 899.964545] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.965396] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564267af-da00-4b17-969f-92b56a0b414a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.975245] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388699, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.012793] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388698, 'name': CreateVM_Task, 'duration_secs': 0.381195} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.020269] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 900.020945] env[61998]: DEBUG oslo_vmware.api [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388694, 'name': ReconfigVM_Task, 'duration_secs': 0.981709} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.021849] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5254f1fc-ba27-7e28-8cc5-655a4188fdc9, 'name': SearchDatastore_Task, 'duration_secs': 0.048469} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.024007] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.024259] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.024615] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 900.025303] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.025303] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Reconfigured VM to attach interface {{(pid=61998) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 900.031906] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.032356] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 900.032670] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.032928] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 900.033249] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 900.034212] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cb4930f-8e53-45e5-a967-8c88ebfe1e33 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.036729] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 900.037055] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04240d91-944f-4c32-87a8-44d1f209d5b1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.041053] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-102c97fb-cfdf-4bce-9d24-dce2085804c6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.050599] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388695, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.629635} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 900.051640] env[61998]: INFO nova.virt.vmwareapi.ds_util [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] b9c5feec-7bfd-470e-9833-b45403195e83/a90c4a31-8bcc-48cf-ada7-7369ab14c460-rescue.vmdk.
[ 900.054527] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63f8268-23a4-4a8c-bd50-b8b31194ef42 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.058534] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){
[ 900.058534] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52b1943e-b3db-cb94-61b3-dd6beb2522dc"
[ 900.058534] env[61998]: _type = "Task"
[ 900.058534] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 900.061453] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 900.061755] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 900.062875] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){
[ 900.062875] env[61998]: value = "task-1388700"
[ 900.062875] env[61998]: _type = "Task"
[ 900.062875] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 900.067714] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea171745-aa1d-4b0c-984a-f175deb8314b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.101919] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] b9c5feec-7bfd-470e-9833-b45403195e83/a90c4a31-8bcc-48cf-ada7-7369ab14c460-rescue.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 900.105194] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0993645-ca36-4c50-a135-8701082ecbe3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.123110] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){
[ 900.123110] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]521fb488-2b8f-2b35-6ae8-1bb3a9861e43"
[ 900.123110] env[61998]: _type = "Task"
[ 900.123110] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 900.123398] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b1943e-b3db-cb94-61b3-dd6beb2522dc, 'name': SearchDatastore_Task, 'duration_secs': 0.02667} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 900.124068] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 900.124347] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 900.124597] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 900.124762] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 900.124959] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 900.132162] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa4c4c9b-c6fc-4c08-b1b2-f14a824fb34f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.135576] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] VM already powered off {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}}
[ 900.135777] env[61998]: DEBUG nova.compute.manager [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}}
[ 900.136102] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){
[ 900.136102] env[61998]: value = "task-1388701"
[ 900.136102] env[61998]: _type = "Task"
[ 900.136102] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 900.137338] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe81e28-f554-421b-9325-689e8cca2364 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.145886] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]521fb488-2b8f-2b35-6ae8-1bb3a9861e43, 'name': SearchDatastore_Task, 'duration_secs': 0.024736} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 900.148225] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16a44d3b-36b3-40a1-a7fe-fab43b32a528 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.150512] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 900.150690] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 900.155906] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8619e321-f09f-4799-a602-a60de91e1685 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.158249] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 900.158417] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 900.158591] env[61998]: DEBUG nova.network.neutron [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 900.159801] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388701, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 900.163233] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){
[ 900.163233] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]525f8665-9ef2-f9e7-7b3b-2f4c07d1e648"
[ 900.163233] env[61998]: _type = "Task"
[ 900.163233] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 900.164568] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){
[ 900.164568] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52c675fa-2b21-e5e2-b35b-913bd0b22e78"
[ 900.164568] env[61998]: _type = "Task"
[ 900.164568] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 900.175922] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]525f8665-9ef2-f9e7-7b3b-2f4c07d1e648, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 900.179044] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c675fa-2b21-e5e2-b35b-913bd0b22e78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
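The paired "Waiting for the task ... to complete" and "progress is N%" records above are oslo.vmware's generic task-polling loop: every vSphere call that returns a Task moref is handed to VMwareAPISession.wait_for_task(), which re-reads the task info until it reaches success or error. A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials, and moref value below are placeholders, not values from this log:

# Sketch only: host, credentials, and the 'vm-123' moref are illustrative.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=10,      # retry transient API faults
    task_poll_interval=0.5)  # seconds between task-info polls

# Issue a call that returns a Task moref, then block on it; wait_for_task
# logs the "Task: {...} progress is N%" DEBUG lines while polling.
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)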
[ 900.346720] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 900.347677] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2768ffe0-f267-4bca-9fb9-16fbbd195b37 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.355081] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 900.355366] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-273442d0-9a99-4b87-8130-375a8b7ca7e8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.418612] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 900.418877] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 900.419135] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleting the datastore file [datastore1] 4c15a9f8-4dc2-48e1-a697-03298adb8527 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 900.419464] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-672a41b7-9fbf-4b19-8e9e-bdd318f741ec {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.425770] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){
[ 900.425770] env[61998]: value = "task-1388703"
[ 900.425770] env[61998]: _type = "Task"
[ 900.425770] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 900.433411] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388703, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 900.441688] env[61998]: DEBUG nova.compute.manager [req-a8de91b0-1ec1-44ff-bcdd-c7551e053ac9 req-b16ab2ae-f11d-47a1-844c-072d2f5d5c9b service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Received event network-vif-unplugged-089c550f-d232-4727-b576-df921335d3e4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 900.441830] env[61998]: DEBUG oslo_concurrency.lockutils [req-a8de91b0-1ec1-44ff-bcdd-c7551e053ac9 req-b16ab2ae-f11d-47a1-844c-072d2f5d5c9b service nova] Acquiring lock "4c15a9f8-4dc2-48e1-a697-03298adb8527-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 900.442112] env[61998]: DEBUG oslo_concurrency.lockutils [req-a8de91b0-1ec1-44ff-bcdd-c7551e053ac9 req-b16ab2ae-f11d-47a1-844c-072d2f5d5c9b service nova] Lock "4c15a9f8-4dc2-48e1-a697-03298adb8527-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 900.442251] env[61998]: DEBUG oslo_concurrency.lockutils [req-a8de91b0-1ec1-44ff-bcdd-c7551e053ac9 req-b16ab2ae-f11d-47a1-844c-072d2f5d5c9b service nova] Lock "4c15a9f8-4dc2-48e1-a697-03298adb8527-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 900.442427] env[61998]: DEBUG nova.compute.manager [req-a8de91b0-1ec1-44ff-bcdd-c7551e053ac9 req-b16ab2ae-f11d-47a1-844c-072d2f5d5c9b service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] No waiting events found dispatching network-vif-unplugged-089c550f-d232-4727-b576-df921335d3e4 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 900.442596] env[61998]: WARNING nova.compute.manager [req-a8de91b0-1ec1-44ff-bcdd-c7551e053ac9 req-b16ab2ae-f11d-47a1-844c-072d2f5d5c9b service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Received unexpected event network-vif-unplugged-089c550f-d232-4727-b576-df921335d3e4 for instance with vm_state shelved and task_state shelving_offloading.
[ 900.476083] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388699, 'name': ReconfigVM_Task, 'duration_secs': 0.471079} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 900.476343] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Reconfigured VM instance instance-00000052 to attach disk [datastore1] aaee1558-f98b-4006-93b6-69434c78e79c/aaee1558-f98b-4006-93b6-69434c78e79c.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 900.476989] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47966645-3566-496d-ac25-01cd03595ea7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.485533] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){
[ 900.485533] env[61998]: value = "task-1388704"
[ 900.485533] env[61998]: _type = "Task"
[ 900.485533] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 900.490676] env[61998]: INFO nova.compute.manager [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Took 15.58 seconds to build instance.
[ 900.496380] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388704, 'name': Rename_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 900.535779] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5a76b314-2645-4215-85e8-f04da7b0d80a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-c51f684b-84f0-42b3-acf9-9e8317b10cb6-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.550s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 900.649708] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388701, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 900.676228] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]525f8665-9ef2-f9e7-7b3b-2f4c07d1e648, 'name': SearchDatastore_Task, 'duration_secs': 0.012868} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 900.679732] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 900.680011] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 6e71b3c4-bac7-455c-94fd-2a9bc5128132/6e71b3c4-bac7-455c-94fd-2a9bc5128132.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 900.680290] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c675fa-2b21-e5e2-b35b-913bd0b22e78, 'name': SearchDatastore_Task, 'duration_secs': 0.014635} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 900.680490] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5250d17f-eeb7-4a2e-bfa1-bddf349c36d6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.682768] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "dadd9985-bca3-4207-927f-9490e0ae3f10" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 900.682992] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 900.683254] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "dadd9985-bca3-4207-927f-9490e0ae3f10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 900.683468] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 900.683643] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 900.685175] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a01e8117-f29c-4ca2-b8dc-3bc4b49873d2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.687471] env[61998]: INFO nova.compute.manager [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Terminating instance
[ 900.691175] env[61998]: DEBUG nova.compute.manager [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}}
[ 900.691373] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 900.692378] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c8058a-f234-4650-ba1e-3a1f78a12ba6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.696906] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){
[ 900.696906] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5217afdb-9028-b20e-8b90-039213678e68"
[ 900.696906] env[61998]: _type = "Task"
[ 900.696906] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 900.697178] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){
[ 900.697178] env[61998]: value = "task-1388705"
[ 900.697178] env[61998]: _type = "Task"
[ 900.697178] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
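The Acquiring/Acquired/Releasing and acquired-by/released-by records above come from oslo.concurrency's lockutils, which Nova uses to serialize work on shared resources such as the per-image cache files and per-instance event queues. A minimal sketch of the two usual forms, with a placeholder lock name rather than one taken from this log:

# Sketch only: the lock names here are placeholders.
from oslo_concurrency import lockutils

# Context-manager form: emits the "Acquiring"/"Acquired"/"Releasing lock" DEBUG lines.
with lockutils.lock('devstack-image-cache_base/<image-id>.vmdk'):
    pass  # critical section, e.g. fetch-or-reuse a cached base image

# Decorator form: emits the 'acquired by'/'"released" by' lines with wait/held times,
# comparable to the _pop_event and _clear_events records above.
@lockutils.synchronized('<instance-uuid>-events')
def _pop_event():
    pass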
[ 900.705137] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 900.706014] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ceffbbbf-ef0c-4fe5-b717-680ac73a90af {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.714330] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5217afdb-9028-b20e-8b90-039213678e68, 'name': SearchDatastore_Task, 'duration_secs': 0.010339} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 900.714597] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388705, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 900.715253] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 900.715543] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 23265b26-7579-4514-a172-8cf2ec124ec6/23265b26-7579-4514-a172-8cf2ec124ec6.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 900.715814] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a981ce4b-222d-43f1-9a47-34c158abc861 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 900.722104] env[61998]: DEBUG oslo_vmware.api [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){
[ 900.722104] env[61998]: value = "task-1388706"
[ 900.722104] env[61998]: _type = "Task"
[ 900.722104] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 900.728309] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){
[ 900.728309] env[61998]: value = "task-1388707"
[ 900.728309] env[61998]: _type = "Task"
[ 900.728309] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 900.736730] env[61998]: DEBUG oslo_vmware.api [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388706, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 900.742077] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388707, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 900.905849] env[61998]: DEBUG nova.network.neutron [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updating instance_info_cache with network_info: [{"id": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "address": "fa:16:3e:95:c1:87", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2ff8f0-d7", "ovs_interfaceid": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 900.935642] env[61998]: DEBUG oslo_vmware.api [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388703, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148345} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 900.935973] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 900.936187] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 900.936402] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 900.966409] env[61998]: INFO nova.scheduler.client.report [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleted allocations for instance 4c15a9f8-4dc2-48e1-a697-03298adb8527
[ 900.992578] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a0c1ed1a-51df-412d-8929-e7db96902b94 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lock "2914460e-39e5-495b-96d8-b3580d0318d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.091s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 900.999509] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388704, 'name': Rename_Task, 'duration_secs': 0.15491} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 900.999696] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 901.000066] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3bd2e8e-26c7-4ea7-81a5-4dbcbe737121 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 901.008813] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){
[ 901.008813] env[61998]: value = "task-1388708"
[ 901.008813] env[61998]: _type = "Task"
[ 901.008813] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 901.019989] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388708, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 901.129095] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ba5b8eae-4cd0-40e6-a4da-c16d0f9d6495 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquiring lock "interface-2914460e-39e5-495b-96d8-b3580d0318d6-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 901.129095] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ba5b8eae-4cd0-40e6-a4da-c16d0f9d6495 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lock "interface-2914460e-39e5-495b-96d8-b3580d0318d6-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 901.129570] env[61998]: DEBUG nova.objects.instance [None req-ba5b8eae-4cd0-40e6-a4da-c16d0f9d6495 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lazy-loading 'flavor' on Instance uuid 2914460e-39e5-495b-96d8-b3580d0318d6 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 901.152496] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388701, 'name': ReconfigVM_Task, 'duration_secs': 0.791826} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 901.152808] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Reconfigured VM instance instance-0000004f to attach disk [datastore1] b9c5feec-7bfd-470e-9833-b45403195e83/a90c4a31-8bcc-48cf-ada7-7369ab14c460-rescue.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 901.153731] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46f27d7-cbbb-4cc9-8ee9-cf0b9f50e845 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 901.182173] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8d45b59-de95-4f64-b07b-4e63a3a0150c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 901.199359] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){
[ 901.199359] env[61998]: value = "task-1388709"
[ 901.199359] env[61998]: _type = "Task"
[ 901.199359] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 901.211974] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388705, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 901.215729] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388709, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 901.233049] env[61998]: DEBUG oslo_vmware.api [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388706, 'name': PowerOffVM_Task, 'duration_secs': 0.204236} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 901.236604] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 901.236807] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 901.237119] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bc08d494-008d-412c-8c25-c1fb07c6145e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 901.245446] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388707, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 901.315152] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 901.315631] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 901.315911] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Deleting the datastore file [datastore2] dadd9985-bca3-4207-927f-9490e0ae3f10 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 901.316272] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b3a3b0c-8791-4665-8c50-0d2cfe425b33 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 901.323249] env[61998]: DEBUG oslo_vmware.api [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for the task: (returnval){
[ 901.323249] env[61998]: value = "task-1388711"
[ 901.323249] env[61998]: _type = "Task"
[ 901.323249] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 901.332450] env[61998]: DEBUG oslo_vmware.api [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388711, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 901.408515] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 901.472073] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 901.472182] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 901.472349] env[61998]: DEBUG nova.objects.instance [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lazy-loading 'resources' on Instance uuid 4c15a9f8-4dc2-48e1-a697-03298adb8527 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 901.521759] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388708, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 901.633906] env[61998]: DEBUG nova.objects.instance [None req-ba5b8eae-4cd0-40e6-a4da-c16d0f9d6495 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lazy-loading 'pci_requests' on Instance uuid 2914460e-39e5-495b-96d8-b3580d0318d6 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 901.711519] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388709, 'name': ReconfigVM_Task, 'duration_secs': 0.202415} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 901.714700] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 901.714959] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388705, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608859} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 901.715189] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce6c0b69-7aec-4bb1-9ca8-aed14a4ce84d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 901.716638] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 6e71b3c4-bac7-455c-94fd-2a9bc5128132/6e71b3c4-bac7-455c-94fd-2a9bc5128132.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 901.716859] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 901.717095] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d9a318f-96ba-4c3f-b256-58880efe39a1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 901.723738] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){
[ 901.723738] env[61998]: value = "task-1388712"
[ 901.723738] env[61998]: _type = "Task"
[ 901.723738] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 901.724925] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){
[ 901.724925] env[61998]: value = "task-1388713"
[ 901.724925] env[61998]: _type = "Task"
[ 901.724925] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 901.742213] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388713, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 901.743405] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388712, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 901.749349] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388707, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.783941} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 901.749789] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 23265b26-7579-4514-a172-8cf2ec124ec6/23265b26-7579-4514-a172-8cf2ec124ec6.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 901.750070] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 901.750269] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47a68a73-7038-460a-957b-b72d8ae03ce4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 901.760020] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){
[ 901.760020] env[61998]: value = "task-1388714"
[ 901.760020] env[61998]: _type = "Task"
[ 901.760020] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 901.768616] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388714, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 901.832928] env[61998]: DEBUG oslo_vmware.api [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Task: {'id': task-1388711, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.428277} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
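Each spawn above follows the same disk sequence: CopyVirtualDisk_Task clones the cached base image into the instance directory, then ExtendVirtualDisk_Task grows the copy to the flavor's root-disk size (the "Extending root virtual disk to 1048576" values are in KB, i.e. 1 GiB). A rough sketch of those two VirtualDiskManager calls through oslo.vmware; the session setup, datastore paths, and sizes are placeholders, not values from this log:

# Sketch only: host, credentials, paths, and sizes are illustrative.
from oslo_vmware import api

session = api.VMwareAPISession('vc.example.test', 'user', 'secret', 10, 0.5)
disk_mgr = session.vim.service_content.virtualDiskManager

# 1. Clone the cached image into the instance directory
#    (the CopyVirtualDisk_Task records above).
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore1] devstack-image-cache_base/<image>/<image>.vmdk',
    destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk')
session.wait_for_task(task)

# 2. Extend the copy to the requested root-disk size in KB
#    (the ExtendVirtualDisk_Task records above; 1048576 KB == 1 GiB).
task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
    name='[datastore1] <instance-uuid>/<instance-uuid>.vmdk',
    newCapacityKb=1048576, eagerZero=False)
session.wait_for_task(task)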
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.833215] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 901.833411] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 901.833592] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 901.833766] env[61998]: INFO nova.compute.manager [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Took 1.14 seconds to destroy the instance on the hypervisor. [ 901.834171] env[61998]: DEBUG oslo.service.loopingcall [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.834400] env[61998]: DEBUG nova.compute.manager [-] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 901.834500] env[61998]: DEBUG nova.network.neutron [-] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 901.977922] env[61998]: DEBUG nova.objects.instance [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lazy-loading 'numa_topology' on Instance uuid 4c15a9f8-4dc2-48e1-a697-03298adb8527 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 902.022428] env[61998]: DEBUG oslo_vmware.api [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388708, 'name': PowerOnVM_Task, 'duration_secs': 0.780259} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.022754] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 902.022966] env[61998]: INFO nova.compute.manager [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Took 8.54 seconds to spawn the instance on the hypervisor. [ 902.023164] env[61998]: DEBUG nova.compute.manager [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 902.023939] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a7dd57-05ec-4bd9-9707-09ecba9ff95e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.105146] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 902.106097] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd938e9-bcbf-4f8f-92f8-093444ae6e4a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.114075] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 902.114337] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e6901df-c56f-438e-944e-77b8f258fba9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.136714] env[61998]: DEBUG nova.objects.base [None req-ba5b8eae-4cd0-40e6-a4da-c16d0f9d6495 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Object Instance<2914460e-39e5-495b-96d8-b3580d0318d6> lazy-loaded attributes: flavor,pci_requests {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 902.137394] env[61998]: DEBUG nova.network.neutron [None req-ba5b8eae-4cd0-40e6-a4da-c16d0f9d6495 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 902.196938] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 
2d0b199f-e0f1-42e0-afb5-e08602aebf01] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 902.197199] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 902.198046] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleting the datastore file [datastore2] 2d0b199f-e0f1-42e0-afb5-e08602aebf01 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 902.198046] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b3890c8-f4bd-4372-8d56-0c39780fd231 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.207125] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 902.207125] env[61998]: value = "task-1388716" [ 902.207125] env[61998]: _type = "Task" [ 902.207125] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.215286] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388716, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.236838] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388712, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.239579] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388713, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074084} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.239827] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.240583] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010ea73b-0396-4540-800e-ee38b14c4285 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.262498] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 6e71b3c4-bac7-455c-94fd-2a9bc5128132/6e71b3c4-bac7-455c-94fd-2a9bc5128132.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.263713] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ba5b8eae-4cd0-40e6-a4da-c16d0f9d6495 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lock "interface-2914460e-39e5-495b-96d8-b3580d0318d6-None" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 1.135s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.265017] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49339f6f-3b32-4379-8f32-312b3d1f1fe2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.291415] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388714, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072719} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.292712] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.293345] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 902.293345] env[61998]: value = "task-1388717" [ 902.293345] env[61998]: _type = "Task" [ 902.293345] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.294047] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c0e854-c1d6-47ac-87ea-b5ba3736382b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.304331] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388717, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.322371] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 23265b26-7579-4514-a172-8cf2ec124ec6/23265b26-7579-4514-a172-8cf2ec124ec6.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.323167] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc2b48a6-f03a-4af6-87ed-933b52aa90f6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.343389] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 902.343389] env[61998]: value = "task-1388718" [ 902.343389] env[61998]: _type = "Task" [ 902.343389] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.353023] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388718, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.480838] env[61998]: DEBUG nova.objects.base [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Object Instance<4c15a9f8-4dc2-48e1-a697-03298adb8527> lazy-loaded attributes: resources,numa_topology {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 902.544572] env[61998]: INFO nova.compute.manager [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Took 14.64 seconds to build instance. 
[ 902.649179] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431e7b36-d6da-492f-b1fc-ec8a91db7feb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.657302] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a6c306-5875-47fc-85e8-f50ee5529e9a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.694717] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52daac8a-71bf-4b7c-83e8-48bdff9ce236 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.703623] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be615aa-b54f-4e21-bdfa-468d336ffa6e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.724448] env[61998]: DEBUG nova.compute.provider_tree [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.730079] env[61998]: DEBUG oslo_vmware.api [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388716, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.319599} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.733241] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 902.733443] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 902.733623] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 902.741347] env[61998]: DEBUG oslo_vmware.api [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388712, 'name': PowerOnVM_Task, 'duration_secs': 0.835837} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.741601] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 902.744236] env[61998]: DEBUG nova.compute.manager [None req-d8a65b91-09a6-4807-9c2e-c0e994b960b3 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 902.745012] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecac0745-dfa4-47a8-8b6d-1787b1745982 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.765885] env[61998]: INFO nova.scheduler.client.report [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleted allocations for instance 2d0b199f-e0f1-42e0-afb5-e08602aebf01 [ 902.808142] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388717, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.855148] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388718, 'name': ReconfigVM_Task, 'duration_secs': 0.325708} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.855497] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 23265b26-7579-4514-a172-8cf2ec124ec6/23265b26-7579-4514-a172-8cf2ec124ec6.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.856169] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00ff103b-48b0-4fed-91eb-aa9a31b129d9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.862075] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 902.862075] env[61998]: value = "task-1388719" [ 902.862075] env[61998]: _type = "Task" [ 902.862075] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.873884] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388719, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.927834] env[61998]: DEBUG nova.compute.manager [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Received event network-changed-089c550f-d232-4727-b576-df921335d3e4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 902.928065] env[61998]: DEBUG nova.compute.manager [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Refreshing instance network info cache due to event network-changed-089c550f-d232-4727-b576-df921335d3e4. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 902.928317] env[61998]: DEBUG oslo_concurrency.lockutils [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] Acquiring lock "refresh_cache-4c15a9f8-4dc2-48e1-a697-03298adb8527" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.928472] env[61998]: DEBUG oslo_concurrency.lockutils [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] Acquired lock "refresh_cache-4c15a9f8-4dc2-48e1-a697-03298adb8527" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.928640] env[61998]: DEBUG nova.network.neutron [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Refreshing network info cache for port 089c550f-d232-4727-b576-df921335d3e4 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 902.952827] env[61998]: DEBUG nova.compute.manager [req-895f3d7a-6da0-4913-b3a7-5de18d8598fe req-e28bafdc-3410-48b9-b409-a5bdca15e256 service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Received event network-vif-deleted-2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 902.953415] env[61998]: INFO nova.compute.manager [req-895f3d7a-6da0-4913-b3a7-5de18d8598fe req-e28bafdc-3410-48b9-b409-a5bdca15e256 service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Neutron deleted interface 2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1; detaching it from the instance and deleting it from the info cache [ 902.953691] env[61998]: DEBUG nova.network.neutron [req-895f3d7a-6da0-4913-b3a7-5de18d8598fe req-e28bafdc-3410-48b9-b409-a5bdca15e256 service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.023666] env[61998]: DEBUG nova.network.neutron [-] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.048836] env[61998]: DEBUG 
oslo_concurrency.lockutils [None req-8d182e19-504b-41e3-9c29-254abb9c3516 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lock "aaee1558-f98b-4006-93b6-69434c78e79c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.157s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.232335] env[61998]: DEBUG nova.scheduler.client.report [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 903.271708] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.307802] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388717, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.374716] env[61998]: DEBUG oslo_concurrency.lockutils [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "4c15a9f8-4dc2-48e1-a697-03298adb8527" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.375007] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388719, 'name': Rename_Task, 'duration_secs': 0.14246} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.375456] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.375696] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22d368e7-c00f-4ed9-b8d7-1297a05a4458 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.381914] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 903.381914] env[61998]: value = "task-1388720" [ 903.381914] env[61998]: _type = "Task" [ 903.381914] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.390540] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388720, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.457421] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2cf74c4b-dd50-4513-bc9e-15cef8d2cfd4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.469667] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5f4750-ac5e-437a-8dd9-b5981c84c8db {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.497395] env[61998]: DEBUG nova.compute.manager [req-895f3d7a-6da0-4913-b3a7-5de18d8598fe req-e28bafdc-3410-48b9-b409-a5bdca15e256 service nova] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Detach interface failed, port_id=2bb3f6d1-79f5-4ed3-b2a5-fd1f2fc1cad1, reason: Instance dadd9985-bca3-4207-927f-9490e0ae3f10 could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 903.526934] env[61998]: INFO nova.compute.manager [-] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Took 1.69 seconds to deallocate network for instance. [ 903.652421] env[61998]: DEBUG nova.network.neutron [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Updated VIF entry in instance network info cache for port 089c550f-d232-4727-b576-df921335d3e4. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 903.652421] env[61998]: DEBUG nova.network.neutron [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Updating instance_info_cache with network_info: [{"id": "089c550f-d232-4727-b576-df921335d3e4", "address": "fa:16:3e:93:ad:b8", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": null, "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap089c550f-d2", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.739490] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.267s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.742055] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.470s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.742362] env[61998]: DEBUG nova.objects.instance [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lazy-loading 'resources' on Instance uuid 2d0b199f-e0f1-42e0-afb5-e08602aebf01 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 903.810568] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388717, 'name': ReconfigVM_Task, 'duration_secs': 1.421163} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.810568] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 6e71b3c4-bac7-455c-94fd-2a9bc5128132/6e71b3c4-bac7-455c-94fd-2a9bc5128132.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.811168] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-faa741b2-6095-4ea6-af5d-c7ba20216c7b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.817894] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 903.817894] env[61998]: value = "task-1388721" [ 903.817894] env[61998]: _type = "Task" [ 903.817894] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.828170] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388721, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.876401] env[61998]: INFO nova.compute.manager [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Rebuilding instance [ 903.892224] env[61998]: DEBUG oslo_vmware.api [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388720, 'name': PowerOnVM_Task, 'duration_secs': 0.453811} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.894343] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.894557] env[61998]: DEBUG nova.compute.manager [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 903.895989] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc55b93e-8d62-49c6-b57f-a7f40f88eb69 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.929030] env[61998]: DEBUG nova.compute.manager [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 903.930926] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddda923e-58e9-4cce-a714-9d7c425c550b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.034149] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.155425] env[61998]: DEBUG oslo_concurrency.lockutils [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] Releasing lock "refresh_cache-4c15a9f8-4dc2-48e1-a697-03298adb8527" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.155739] env[61998]: DEBUG nova.compute.manager [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Received event network-vif-unplugged-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 904.155944] env[61998]: DEBUG oslo_concurrency.lockutils [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] Acquiring lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.156171] env[61998]: DEBUG oslo_concurrency.lockutils [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.156367] env[61998]: DEBUG oslo_concurrency.lockutils [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.156556] env[61998]: DEBUG nova.compute.manager [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] No waiting events found dispatching network-vif-unplugged-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 904.156734] env[61998]: WARNING nova.compute.manager [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Received unexpected event network-vif-unplugged-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 for instance with vm_state shelved_offloaded and task_state None. [ 904.156902] env[61998]: DEBUG nova.compute.manager [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Received event network-changed-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 904.157076] env[61998]: DEBUG nova.compute.manager [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Refreshing instance network info cache due to event network-changed-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 904.157274] env[61998]: DEBUG oslo_concurrency.lockutils [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] Acquiring lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.157446] env[61998]: DEBUG oslo_concurrency.lockutils [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] Acquired lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.157691] env[61998]: DEBUG nova.network.neutron [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Refreshing network info cache for port da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 904.215826] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "interface-c51f684b-84f0-42b3-acf9-9e8317b10cb6-95b14cbf-3b14-4a08-a168-03339234265d" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.216099] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-c51f684b-84f0-42b3-acf9-9e8317b10cb6-95b14cbf-3b14-4a08-a168-03339234265d" acquired by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.246671] env[61998]: DEBUG nova.objects.instance [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lazy-loading 'numa_topology' on Instance uuid 2d0b199f-e0f1-42e0-afb5-e08602aebf01 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 904.249370] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e092aab2-2101-4450-a5b7-1436e54fbbd4 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "4c15a9f8-4dc2-48e1-a697-03298adb8527" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 25.048s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.250306] env[61998]: DEBUG oslo_concurrency.lockutils [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "4c15a9f8-4dc2-48e1-a697-03298adb8527" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.876s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.250541] env[61998]: DEBUG oslo_concurrency.lockutils [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 
tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "4c15a9f8-4dc2-48e1-a697-03298adb8527-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.250728] env[61998]: DEBUG oslo_concurrency.lockutils [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "4c15a9f8-4dc2-48e1-a697-03298adb8527-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.250932] env[61998]: DEBUG oslo_concurrency.lockutils [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "4c15a9f8-4dc2-48e1-a697-03298adb8527-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.255473] env[61998]: INFO nova.compute.manager [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Terminating instance [ 904.257575] env[61998]: DEBUG nova.compute.manager [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 904.257814] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 904.258105] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a3e7eef-799a-4777-ac79-28fda39fc0ee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.270242] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3570ab-5ba0-4839-a5e4-3877e2026e7c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.299980] env[61998]: WARNING nova.virt.vmwareapi.vmops [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4c15a9f8-4dc2-48e1-a697-03298adb8527 could not be found. 
[ 904.300215] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 904.300396] env[61998]: INFO nova.compute.manager [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Took 0.04 seconds to destroy the instance on the hypervisor. [ 904.300646] env[61998]: DEBUG oslo.service.loopingcall [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 904.300956] env[61998]: DEBUG nova.compute.manager [-] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 904.301027] env[61998]: DEBUG nova.network.neutron [-] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 904.327844] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388721, 'name': Rename_Task, 'duration_secs': 0.228128} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.328148] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.328410] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c43e5b1d-4ef7-4bb8-b3aa-937ac1e2b0be {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.332554] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquiring lock "2914460e-39e5-495b-96d8-b3580d0318d6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.332786] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lock "2914460e-39e5-495b-96d8-b3580d0318d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.332987] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquiring lock "2914460e-39e5-495b-96d8-b3580d0318d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.333217] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lock "2914460e-39e5-495b-96d8-b3580d0318d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.333412] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lock "2914460e-39e5-495b-96d8-b3580d0318d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.336180] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 904.336180] env[61998]: value = "task-1388722" [ 904.336180] env[61998]: _type = "Task" [ 904.336180] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.336682] env[61998]: INFO nova.compute.manager [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Terminating instance [ 904.341463] env[61998]: DEBUG nova.compute.manager [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 904.341691] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 904.342468] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc46c0d8-6503-4603-8a76-75094b846c10 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.352461] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388722, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.355361] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 904.355595] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9e690a0-f4e6-482f-b105-6366827891ce {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.362047] env[61998]: DEBUG oslo_vmware.api [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Waiting for the task: (returnval){ [ 904.362047] env[61998]: value = "task-1388723" [ 904.362047] env[61998]: _type = "Task" [ 904.362047] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.370386] env[61998]: DEBUG oslo_vmware.api [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388723, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.417034] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.721102] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.721102] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.721944] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3384a2d-536e-49ca-a6fb-d66f913d9896 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.743418] env[61998]: DEBUG nova.network.neutron [-] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.744838] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60970bfe-750e-4281-9f54-486159452476 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.768969] env[61998]: DEBUG nova.objects.base [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Object Instance<2d0b199f-e0f1-42e0-afb5-e08602aebf01> lazy-loaded attributes: resources,numa_topology {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 904.778209] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Reconfiguring VM to detach interface {{(pid=61998) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 904.779355] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6381938-7a8b-41a3-92b8-70b38c244274 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.808411] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 904.808411] env[61998]: value = "task-1388724" [ 904.808411] env[61998]: _type = "Task" [ 904.808411] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.820993] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.848554] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388722, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.875978] env[61998]: DEBUG oslo_vmware.api [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388723, 'name': PowerOffVM_Task, 'duration_secs': 0.211231} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.876343] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 904.876537] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 904.877136] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba0b2f55-6cd1-4aee-9ac8-de4b75af7c8f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.946151] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 904.946151] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 904.946795] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Deleting the datastore file [datastore1] 2914460e-39e5-495b-96d8-b3580d0318d6 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 904.946795] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b397471-70d9-4e7d-9639-d0f435598aa0 {{(pid=61998) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.956775] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 904.957843] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-069d88a7-2949-4605-a170-c1a92999aeb7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.964983] env[61998]: DEBUG oslo_vmware.api [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Waiting for the task: (returnval){ [ 904.964983] env[61998]: value = "task-1388726" [ 904.964983] env[61998]: _type = "Task" [ 904.964983] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.972760] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 904.972760] env[61998]: value = "task-1388727" [ 904.972760] env[61998]: _type = "Task" [ 904.972760] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.980220] env[61998]: DEBUG oslo_vmware.api [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388726, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.982584] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a27a5d3-9066-47f0-809f-d7282ba1d0c8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.989058] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388727, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.993505] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276e1381-c05e-43a3-bf93-f92b292548b9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.024072] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80434458-9329-461b-96ce-1e2c7497298b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.033982] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3927efb-1cae-4ab7-8457-2e63f20113fb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.048135] env[61998]: DEBUG nova.compute.provider_tree [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.092797] env[61998]: DEBUG nova.network.neutron [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updated VIF entry in instance network info cache for port da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 905.093348] env[61998]: DEBUG nova.network.neutron [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updating instance_info_cache with network_info: [{"id": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "address": "fa:16:3e:95:c1:87", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapda2ff8f0-d7", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.147817] env[61998]: DEBUG nova.compute.manager [req-46132717-f21d-499c-b6b7-b320eb33c95c req-652e004e-973c-480e-9abf-3aa5711b97fc service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Received event network-changed-c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 905.148094] env[61998]: DEBUG nova.compute.manager [req-46132717-f21d-499c-b6b7-b320eb33c95c req-652e004e-973c-480e-9abf-3aa5711b97fc service nova] 
[instance: b9c5feec-7bfd-470e-9833-b45403195e83] Refreshing instance network info cache due to event network-changed-c0165176-8b9e-4fb8-ba3f-c8b58e45c287. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 905.148353] env[61998]: DEBUG oslo_concurrency.lockutils [req-46132717-f21d-499c-b6b7-b320eb33c95c req-652e004e-973c-480e-9abf-3aa5711b97fc service nova] Acquiring lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.148525] env[61998]: DEBUG oslo_concurrency.lockutils [req-46132717-f21d-499c-b6b7-b320eb33c95c req-652e004e-973c-480e-9abf-3aa5711b97fc service nova] Acquired lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.148667] env[61998]: DEBUG nova.network.neutron [req-46132717-f21d-499c-b6b7-b320eb33c95c req-652e004e-973c-480e-9abf-3aa5711b97fc service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Refreshing network info cache for port c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 905.249563] env[61998]: INFO nova.compute.manager [-] [instance: 4c15a9f8-4dc2-48e1-a697-03298adb8527] Took 0.95 seconds to deallocate network for instance. [ 905.280082] env[61998]: DEBUG nova.compute.manager [req-5b052b84-278b-4001-abab-1d9312159540 req-e6ba60d0-7354-4ce2-8250-39c6a9553596 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Received event network-changed-c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 905.280210] env[61998]: DEBUG nova.compute.manager [req-5b052b84-278b-4001-abab-1d9312159540 req-e6ba60d0-7354-4ce2-8250-39c6a9553596 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Refreshing instance network info cache due to event network-changed-c0165176-8b9e-4fb8-ba3f-c8b58e45c287. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 905.280450] env[61998]: DEBUG oslo_concurrency.lockutils [req-5b052b84-278b-4001-abab-1d9312159540 req-e6ba60d0-7354-4ce2-8250-39c6a9553596 service nova] Acquiring lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.320868] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.335825] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.349145] env[61998]: DEBUG oslo_vmware.api [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388722, 'name': PowerOnVM_Task, 'duration_secs': 0.590074} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.349428] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 905.349638] env[61998]: INFO nova.compute.manager [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Took 9.60 seconds to spawn the instance on the hypervisor. [ 905.349809] env[61998]: DEBUG nova.compute.manager [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 905.350720] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757acab9-b28f-4873-8d0c-88ea6b631dd6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.481590] env[61998]: DEBUG oslo_vmware.api [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Task: {'id': task-1388726, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231698} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.482245] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 905.482465] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 905.482653] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 905.482844] env[61998]: INFO nova.compute.manager [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Took 1.14 seconds to destroy the instance on the hypervisor. [ 905.483124] env[61998]: DEBUG oslo.service.loopingcall [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 905.483328] env[61998]: DEBUG nova.compute.manager [-] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 905.483472] env[61998]: DEBUG nova.network.neutron [-] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 905.489093] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388727, 'name': PowerOffVM_Task, 'duration_secs': 0.170637} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.490453] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.490725] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 905.491553] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9f78ab-07ed-47c0-b799-f6a9f5574b41 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.498412] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 905.498660] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-779e20df-ab24-4c30-9111-e84eb16b4ee9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.530021] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 905.530021] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 905.530021] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Deleting the datastore file [datastore1] aaee1558-f98b-4006-93b6-69434c78e79c {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 905.530021] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd2695b3-b199-426a-a895-05bc85d6baa7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.534499] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 905.534499] env[61998]: value = "task-1388729" [ 905.534499] env[61998]: _type = "Task" [ 905.534499] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.541939] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388729, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.550954] env[61998]: DEBUG nova.scheduler.client.report [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 905.596768] env[61998]: DEBUG oslo_concurrency.lockutils [req-3340cd10-05cf-4c8a-82a1-1cd6c5b19116 req-11b0f5f8-8247-4897-a3ef-4b4c98350dcb service nova] Releasing lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.827215] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.872577] env[61998]: INFO nova.compute.manager [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Took 16.56 seconds to build instance. [ 905.896102] env[61998]: DEBUG nova.network.neutron [req-46132717-f21d-499c-b6b7-b320eb33c95c req-652e004e-973c-480e-9abf-3aa5711b97fc service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updated VIF entry in instance network info cache for port c0165176-8b9e-4fb8-ba3f-c8b58e45c287. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 905.896102] env[61998]: DEBUG nova.network.neutron [req-46132717-f21d-499c-b6b7-b320eb33c95c req-652e004e-973c-480e-9abf-3aa5711b97fc service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updating instance_info_cache with network_info: [{"id": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "address": "fa:16:3e:05:38:e1", "network": {"id": "46300d6e-80f7-49cb-963b-7c569251b4b2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-784470546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2e3f7b7c926e4f778c23d04ad9598eb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0165176-8b", "ovs_interfaceid": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.938590] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "23265b26-7579-4514-a172-8cf2ec124ec6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.939143] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "23265b26-7579-4514-a172-8cf2ec124ec6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.939143] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "23265b26-7579-4514-a172-8cf2ec124ec6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.939143] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "23265b26-7579-4514-a172-8cf2ec124ec6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.939263] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181
tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "23265b26-7579-4514-a172-8cf2ec124ec6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.944405] env[61998]: INFO nova.compute.manager [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Terminating instance [ 905.952470] env[61998]: DEBUG nova.compute.manager [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 905.952720] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 905.957155] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfdac568-32e1-45c8-a1db-ed18f94d79f5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.966751] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.966751] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f188d8a-0553-48ad-a88e-4fcff80dc27c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.974997] env[61998]: DEBUG oslo_vmware.api [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 905.974997] env[61998]: value = "task-1388730" [ 905.974997] env[61998]: _type = "Task" [ 905.974997] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.986293] env[61998]: DEBUG oslo_vmware.api [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388730, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.045668] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388729, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156322} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.046148] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 906.046372] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 906.046709] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.059918] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc67e351-854f-43c8-9de1-49758876a8fd tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.314s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.059918] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.025s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.059918] env[61998]: DEBUG nova.objects.instance [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lazy-loading 'resources' on Instance uuid dadd9985-bca3-4207-927f-9490e0ae3f10 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.280646] env[61998]: DEBUG oslo_concurrency.lockutils [None req-000ec76a-ab3d-4722-b139-b00966c4ffd0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "4c15a9f8-4dc2-48e1-a697-03298adb8527" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.030s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.319996] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.322392] env[61998]: DEBUG nova.network.neutron [-] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.375197] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cce6972c-978e-4d3a-88f6-28dba4cd3b03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "6e71b3c4-bac7-455c-94fd-2a9bc5128132" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 18.084s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.400334] env[61998]: DEBUG oslo_concurrency.lockutils [req-46132717-f21d-499c-b6b7-b320eb33c95c req-652e004e-973c-480e-9abf-3aa5711b97fc service nova] Releasing lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.400334] env[61998]: DEBUG oslo_concurrency.lockutils [req-5b052b84-278b-4001-abab-1d9312159540 req-e6ba60d0-7354-4ce2-8250-39c6a9553596 service nova] Acquired lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.400334] env[61998]: DEBUG nova.network.neutron [req-5b052b84-278b-4001-abab-1d9312159540 req-e6ba60d0-7354-4ce2-8250-39c6a9553596 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Refreshing network info cache for port c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 906.485985] env[61998]: DEBUG oslo_vmware.api [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388730, 'name': PowerOffVM_Task, 'duration_secs': 0.196939} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.486474] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 906.486969] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 906.487370] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b31cdc5-02c2-44f6-beec-fe8df9d868c0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.507479] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.508590] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.557967] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 906.558220] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 906.558409] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleting the datastore file [datastore2] 23265b26-7579-4514-a172-8cf2ec124ec6 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 906.558665] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7894ce5c-d211-4592-84e8-89eb0a8fbec6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.568942] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc67e351-854f-43c8-9de1-49758876a8fd
tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 24.477s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.569800] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 1.234s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.569980] env[61998]: INFO nova.compute.manager [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Unshelving [ 906.572694] env[61998]: DEBUG oslo_vmware.api [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 906.572694] env[61998]: value = "task-1388732" [ 906.572694] env[61998]: _type = "Task" [ 906.572694] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.581677] env[61998]: DEBUG oslo_vmware.api [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388732, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.729089] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2020c428-e3c0-4c1a-a625-9f6c4d97352d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.737379] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7e3ea3-ca02-47a5-afd2-9ec717b1aa32 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.769786] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafa65d3-16d2-4fdb-b71a-103ab0eef595 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.777216] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281ac813-c6e2-4ddd-91f7-93409b31ed60 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.790879] env[61998]: DEBUG nova.compute.provider_tree [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.819482] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.825009] env[61998]: INFO nova.compute.manager [-] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Took 1.34 seconds to deallocate network for instance. 
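The lock records throughout this section follow the standard oslo.concurrency pattern: the "Acquiring lock ... by ..." / "acquired ... :: waited" / ""released" ... :: held" lines (inner, lockutils.py:402/407/421) come from a function wrapped in a named lock, while the plain Acquiring/Acquired/Releasing records (lock, lockutils.py:310/313/331) come from the context-manager form used for the "refresh_cache-<uuid>" locks. A minimal sketch of how such records are produced, assuming oslo.concurrency is installed; the lock names and the function below are illustrative, not taken from this log:

```python
import logging

from oslo_concurrency import lockutils

# Emit DEBUG records so lockutils' "Acquiring lock ... / acquired ... ::
# waited / released ... :: held" lines become visible on stderr.
logging.basicConfig(level=logging.DEBUG)

@lockutils.synchronized('compute_resources')
def update_usage():
    # Critical section: at most one thread in this process runs this at a
    # time, which is how the resource tracker serializes inventory updates.
    pass

# Context-manager form, analogous to the per-instance cache locks above.
with lockutils.lock('refresh_cache-example'):
    pass

update_usage()
```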
[ 906.870128] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "6e71b3c4-bac7-455c-94fd-2a9bc5128132" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.870424] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "6e71b3c4-bac7-455c-94fd-2a9bc5128132" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.870708] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "6e71b3c4-bac7-455c-94fd-2a9bc5128132-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.870859] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "6e71b3c4-bac7-455c-94fd-2a9bc5128132-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.871027] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "6e71b3c4-bac7-455c-94fd-2a9bc5128132-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.873025] env[61998]: INFO nova.compute.manager [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Terminating instance [ 906.874831] env[61998]: DEBUG nova.compute.manager [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Start destroying the instance on the hypervisor.
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 906.875051] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 906.875960] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbbc329-3d3f-4683-afb9-dede726051b6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.883856] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.884104] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-631f1627-e241-4c56-8cfd-d6e949e0c719 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.890927] env[61998]: DEBUG oslo_vmware.api [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 906.890927] env[61998]: value = "task-1388733" [ 906.890927] env[61998]: _type = "Task" [ 906.890927] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.898254] env[61998]: DEBUG oslo_vmware.api [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388733, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.012275] env[61998]: DEBUG nova.compute.manager [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 907.096189] env[61998]: DEBUG oslo_vmware.api [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388732, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.435942} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.098537] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.099232] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 907.099232] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 907.099232] env[61998]: INFO nova.compute.manager [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Took 1.15 seconds to destroy the instance on the hypervisor. [ 907.099533] env[61998]: DEBUG oslo.service.loopingcall [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.099740] env[61998]: DEBUG nova.compute.manager [-] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 907.099840] env[61998]: DEBUG nova.network.neutron [-] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 907.111779] env[61998]: DEBUG nova.virt.hardware [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 907.111779] env[61998]: DEBUG nova.virt.hardware [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 907.111908] env[61998]: DEBUG nova.virt.hardware [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 907.112070] env[61998]: DEBUG nova.virt.hardware [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 907.113289] env[61998]: DEBUG nova.virt.hardware [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.113289] env[61998]: DEBUG nova.virt.hardware [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 907.113289] env[61998]: DEBUG nova.virt.hardware [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 907.113289] env[61998]: DEBUG nova.virt.hardware [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 907.113289] env[61998]: DEBUG nova.virt.hardware [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 907.113289] env[61998]: DEBUG nova.virt.hardware [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 907.113289] env[61998]: DEBUG nova.virt.hardware [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 907.116871] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57366fc4-d8d3-4784-9660-5842e0e5a110 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.127927] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750e313a-b31b-4bbc-b80f-2aa95ac67808 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.145703] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Instance VIF info [] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 907.152488] env[61998]: DEBUG oslo.service.loopingcall [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.152796] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 907.153121] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-baf933e1-a72b-4e30-a729-dd89044c89f2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.176021] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 907.176021] env[61998]: value = "task-1388734" [ 907.176021] env[61998]: _type = "Task" [ 907.176021] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.181014] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388734, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.236238] env[61998]: DEBUG nova.network.neutron [req-5b052b84-278b-4001-abab-1d9312159540 req-e6ba60d0-7354-4ce2-8250-39c6a9553596 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updated VIF entry in instance network info cache for port c0165176-8b9e-4fb8-ba3f-c8b58e45c287. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 907.236699] env[61998]: DEBUG nova.network.neutron [req-5b052b84-278b-4001-abab-1d9312159540 req-e6ba60d0-7354-4ce2-8250-39c6a9553596 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updating instance_info_cache with network_info: [{"id": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "address": "fa:16:3e:05:38:e1", "network": {"id": "46300d6e-80f7-49cb-963b-7c569251b4b2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-784470546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2e3f7b7c926e4f778c23d04ad9598eb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0165176-8b", "ovs_interfaceid": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.297021] env[61998]: DEBUG nova.scheduler.client.report [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 907.310866] env[61998]: DEBUG nova.compute.manager [req-83f3e1f6-227b-442a-b03d-25f04fd46327 req-38d49471-fc8d-464a-8a60-2b16590e4373 service nova] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Received event network-vif-deleted-47933687-23f9-4630-a0bb-9af926699f03 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 907.311229] env[61998]: DEBUG nova.compute.manager [req-83f3e1f6-227b-442a-b03d-25f04fd46327 req-38d49471-fc8d-464a-8a60-2b16590e4373 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Received event network-changed-c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 907.311421] env[61998]: DEBUG 
nova.compute.manager [req-83f3e1f6-227b-442a-b03d-25f04fd46327 req-38d49471-fc8d-464a-8a60-2b16590e4373 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Refreshing instance network info cache due to event network-changed-c0165176-8b9e-4fb8-ba3f-c8b58e45c287. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 907.311615] env[61998]: DEBUG oslo_concurrency.lockutils [req-83f3e1f6-227b-442a-b03d-25f04fd46327 req-38d49471-fc8d-464a-8a60-2b16590e4373 service nova] Acquiring lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.323571] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.330935] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.400748] env[61998]: DEBUG oslo_vmware.api [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388733, 'name': PowerOffVM_Task, 'duration_secs': 0.181923} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.401044] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.401260] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.401574] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26a2871f-d0ec-4a6f-a068-7fa94d79eb03 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.540679] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.586762] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "93df4e9a-29d2-4551-9bda-58b02163c116" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.587027] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "93df4e9a-29d2-4551-9bda-58b02163c116" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.589937] env[61998]: DEBUG nova.compute.utils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 907.595096] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.595096] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.595096] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleting the datastore file [datastore1] 6e71b3c4-bac7-455c-94fd-2a9bc5128132 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.595267] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e55cc93-f65f-4787-8361-6e89e067af0b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.602271] env[61998]: DEBUG oslo_vmware.api [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 907.602271] env[61998]: value = "task-1388736" [ 907.602271] env[61998]: _type = "Task" [ 907.602271] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.612084] env[61998]: DEBUG oslo_vmware.api [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388736, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.684793] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388734, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.740106] env[61998]: DEBUG oslo_concurrency.lockutils [req-5b052b84-278b-4001-abab-1d9312159540 req-e6ba60d0-7354-4ce2-8250-39c6a9553596 service nova] Releasing lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.740617] env[61998]: DEBUG oslo_concurrency.lockutils [req-83f3e1f6-227b-442a-b03d-25f04fd46327 req-38d49471-fc8d-464a-8a60-2b16590e4373 service nova] Acquired lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.740824] env[61998]: DEBUG nova.network.neutron [req-83f3e1f6-227b-442a-b03d-25f04fd46327 req-38d49471-fc8d-464a-8a60-2b16590e4373 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Refreshing network info cache for port c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 907.801286] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.742s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.804125] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.387s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.804329] env[61998]: DEBUG nova.objects.instance [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61998) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 907.825622] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.826593] env[61998]: INFO nova.scheduler.client.report [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Deleted allocations for instance dadd9985-bca3-4207-927f-9490e0ae3f10 [ 907.974624] env[61998]: DEBUG nova.network.neutron [-] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.098024] env[61998]: DEBUG nova.compute.manager [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 908.098417] env[61998]: INFO nova.virt.block_device [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Booting with volume 834152ba-512a-44f5-b453-523da9f699e7 at /dev/sdb [ 908.115096] env[61998]: DEBUG oslo_vmware.api [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388736, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156755} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.115380] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.115588] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.115810] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.116701] env[61998]: INFO nova.compute.manager [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Took 1.24 seconds to destroy the instance on the hypervisor. [ 908.116701] env[61998]: DEBUG oslo.service.loopingcall [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.116701] env[61998]: DEBUG nova.compute.manager [-] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 908.116701] env[61998]: DEBUG nova.network.neutron [-] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.144394] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0fcebf42-a764-45b6-8542-4d8f85043019 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.153253] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3faf65-0707-4baa-9b21-362868d11ed2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.181329] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f991cb8-9bd3-46b6-aace-32f04268f8b1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.188499] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388734, 'name': CreateVM_Task, 'duration_secs': 0.556517} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.190916] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 908.191432] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.191599] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.191933] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 908.195020] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eebeb3d-639f-4d14-b261-6faedd0d389f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.205101] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5d833dd-d002-4eba-a945-fab11b4e030a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
908.209733] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 908.209733] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]528f517f-e546-38ef-23c3-f187344ed3aa" [ 908.209733] env[61998]: _type = "Task" [ 908.209733] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.225232] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b721d9ae-f42a-4990-a653-b220840a07c2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.231084] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528f517f-e546-38ef-23c3-f187344ed3aa, 'name': SearchDatastore_Task, 'duration_secs': 0.01004} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.231754] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.232011] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 908.232263] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.232412] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.232592] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 908.232851] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b32306f-d449-49f5-b311-35cf4d85158d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.240384] env[61998]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76bb1b1a-1eaa-4c23-ac18-0f5193db0a29 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.245433] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 908.245641] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 908.246355] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e873b13-93dc-4793-94f8-7ab180f51dd4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.252389] env[61998]: DEBUG nova.virt.block_device [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updating existing volume attachment record: d9075c39-55c5-4a93-b086-3378b4efdb94 {{(pid=61998) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 908.258727] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 908.258727] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]523884c8-1d6d-76d5-ffac-ea1a088ab595" [ 908.258727] env[61998]: _type = "Task" [ 908.258727] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.265077] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523884c8-1d6d-76d5-ffac-ea1a088ab595, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.322645] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.333413] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a4b4126b-1035-45b6-9518-247b838f9fce tempest-AttachVolumeNegativeTest-831212951 tempest-AttachVolumeNegativeTest-831212951-project-member] Lock "dadd9985-bca3-4207-927f-9490e0ae3f10" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.650s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.478349] env[61998]: INFO nova.compute.manager [-] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Took 1.38 seconds to deallocate network for instance. [ 908.560480] env[61998]: DEBUG nova.network.neutron [req-83f3e1f6-227b-442a-b03d-25f04fd46327 req-38d49471-fc8d-464a-8a60-2b16590e4373 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updated VIF entry in instance network info cache for port c0165176-8b9e-4fb8-ba3f-c8b58e45c287. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 908.560903] env[61998]: DEBUG nova.network.neutron [req-83f3e1f6-227b-442a-b03d-25f04fd46327 req-38d49471-fc8d-464a-8a60-2b16590e4373 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updating instance_info_cache with network_info: [{"id": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "address": "fa:16:3e:05:38:e1", "network": {"id": "46300d6e-80f7-49cb-963b-7c569251b4b2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-784470546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2e3f7b7c926e4f778c23d04ad9598eb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0165176-8b", "ovs_interfaceid": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.624233] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.768027] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523884c8-1d6d-76d5-ffac-ea1a088ab595, 'name': SearchDatastore_Task, 'duration_secs': 0.008934} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.768849] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-536eebad-c127-4a28-a623-89828d62316e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.774099] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 908.774099] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52b00aa1-8957-f7f6-82b9-693ffbb51549" [ 908.774099] env[61998]: _type = "Task" [ 908.774099] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.782714] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b00aa1-8957-f7f6-82b9-693ffbb51549, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.814608] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e52e3942-51c9-4f9d-86c2-e03a4589ed3f tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.815873] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.485s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.816158] env[61998]: DEBUG nova.objects.instance [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lazy-loading 'resources' on Instance uuid 2914460e-39e5-495b-96d8-b3580d0318d6 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 908.827835] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.896110] env[61998]: DEBUG nova.network.neutron [-] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.943401] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquiring lock "b9c5feec-7bfd-470e-9833-b45403195e83" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.943817] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Lock "b9c5feec-7bfd-470e-9833-b45403195e83" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.945333] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquiring lock "b9c5feec-7bfd-470e-9833-b45403195e83-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.946087] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Lock "b9c5feec-7bfd-470e-9833-b45403195e83-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.946330] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Lock "b9c5feec-7bfd-470e-9833-b45403195e83-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.951716] env[61998]: INFO nova.compute.manager [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Terminating instance [ 908.957684] env[61998]: DEBUG nova.compute.manager [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Start destroying the instance on the hypervisor.
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 908.957991] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 908.959027] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144cd1a9-b278-42b9-a5f3-2ce3dd512cd6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.975828] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 908.975828] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc1753c7-61ca-46f5-873e-27a8060649d2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.986979] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.992965] env[61998]: DEBUG oslo_vmware.api [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 908.992965] env[61998]: value = "task-1388741" [ 908.992965] env[61998]: _type = "Task" [ 908.992965] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.015300] env[61998]: DEBUG oslo_vmware.api [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388741, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.065458] env[61998]: DEBUG oslo_concurrency.lockutils [req-83f3e1f6-227b-442a-b03d-25f04fd46327 req-38d49471-fc8d-464a-8a60-2b16590e4373 service nova] Releasing lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.284727] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b00aa1-8957-f7f6-82b9-693ffbb51549, 'name': SearchDatastore_Task, 'duration_secs': 0.009309} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.285024] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.285304] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] aaee1558-f98b-4006-93b6-69434c78e79c/aaee1558-f98b-4006-93b6-69434c78e79c.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 909.285574] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b6ba677-fa13-40d5-88a2-dfbcbce3b557 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.291932] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 909.291932] env[61998]: value = "task-1388742" [ 909.291932] env[61998]: _type = "Task" [ 909.291932] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.299463] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388742, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.327414] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.346848] env[61998]: DEBUG nova.compute.manager [req-6638421f-39d2-4c9c-ad89-3a60815d63d5 req-11281db9-4c73-4059-b6af-95de06629a10 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Received event network-changed-c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 909.347101] env[61998]: DEBUG nova.compute.manager [req-6638421f-39d2-4c9c-ad89-3a60815d63d5 req-11281db9-4c73-4059-b6af-95de06629a10 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Refreshing instance network info cache due to event network-changed-c0165176-8b9e-4fb8-ba3f-c8b58e45c287. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 909.347341] env[61998]: DEBUG oslo_concurrency.lockutils [req-6638421f-39d2-4c9c-ad89-3a60815d63d5 req-11281db9-4c73-4059-b6af-95de06629a10 service nova] Acquiring lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.347485] env[61998]: DEBUG oslo_concurrency.lockutils [req-6638421f-39d2-4c9c-ad89-3a60815d63d5 req-11281db9-4c73-4059-b6af-95de06629a10 service nova] Acquired lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.347653] env[61998]: DEBUG nova.network.neutron [req-6638421f-39d2-4c9c-ad89-3a60815d63d5 req-11281db9-4c73-4059-b6af-95de06629a10 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Refreshing network info cache for port c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 909.399563] env[61998]: INFO nova.compute.manager [-] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Took 1.28 seconds to deallocate network for instance. [ 909.461639] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094a93e0-eca8-48e9-bf1a-a410f289ded9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.472429] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aecca347-6762-4e87-a61b-83e1265a2141 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.508750] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2765d7f-5d88-4b2f-9665-134803455e63 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.521009] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3853bbf1-19e5-49fa-bb61-3edc1ae90f18 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.526273] env[61998]: DEBUG oslo_vmware.api [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388741, 'name': PowerOffVM_Task, 'duration_secs': 0.226137} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.526625] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 909.526842] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 909.527569] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8b53a5e-04bc-41ab-b6d2-12be348ed8b9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.541527] env[61998]: DEBUG nova.compute.provider_tree [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 909.685777] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 909.686022] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 909.686218] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Deleting the datastore file [datastore1] b9c5feec-7bfd-470e-9833-b45403195e83 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 909.686601] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b965c44-f62e-4b8a-a679-78cf0b8df39c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.694554] env[61998]: DEBUG oslo_vmware.api [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for the task: (returnval){ [ 909.694554] env[61998]: value = "task-1388744" [ 909.694554] env[61998]: _type = "Task" [ 909.694554] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.703832] env[61998]: DEBUG oslo_vmware.api [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388744, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.801973] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388742, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456442} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.802307] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] aaee1558-f98b-4006-93b6-69434c78e79c/aaee1558-f98b-4006-93b6-69434c78e79c.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 909.802575] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 909.802865] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04070e00-798c-49b3-9161-f55423e2b0ad {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.809505] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 909.809505] env[61998]: value = "task-1388745" [ 909.809505] env[61998]: _type = "Task" [ 909.809505] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.818535] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388745, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.827259] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.905392] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.044680] env[61998]: DEBUG nova.scheduler.client.report [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 910.090010] env[61998]: DEBUG nova.network.neutron [req-6638421f-39d2-4c9c-ad89-3a60815d63d5 req-11281db9-4c73-4059-b6af-95de06629a10 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updated VIF entry in instance network info cache for port c0165176-8b9e-4fb8-ba3f-c8b58e45c287. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 910.090520] env[61998]: DEBUG nova.network.neutron [req-6638421f-39d2-4c9c-ad89-3a60815d63d5 req-11281db9-4c73-4059-b6af-95de06629a10 service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updating instance_info_cache with network_info: [{"id": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "address": "fa:16:3e:05:38:e1", "network": {"id": "46300d6e-80f7-49cb-963b-7c569251b4b2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-784470546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "2e3f7b7c926e4f778c23d04ad9598eb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0165176-8b", "ovs_interfaceid": "c0165176-8b9e-4fb8-ba3f-c8b58e45c287", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.205095] env[61998]: DEBUG oslo_vmware.api [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Task: {'id': task-1388744, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203877} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.205361] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 910.205638] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 910.205841] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 910.206029] env[61998]: INFO nova.compute.manager [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Took 1.25 seconds to destroy the instance on the hypervisor. [ 910.206278] env[61998]: DEBUG oslo.service.loopingcall [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 910.206473] env[61998]: DEBUG nova.compute.manager [-] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 910.206568] env[61998]: DEBUG nova.network.neutron [-] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 910.319988] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388745, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065314} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.322231] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 910.323030] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f011b176-9161-4821-9741-7e819ea5b56d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.330613] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.352439] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] aaee1558-f98b-4006-93b6-69434c78e79c/aaee1558-f98b-4006-93b6-69434c78e79c.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 910.352755] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0eb1408b-c1d9-46be-84d9-dcc654bb6c4a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.373648] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 910.373648] env[61998]: value = "task-1388746" [ 910.373648] env[61998]: _type = "Task" [ 910.373648] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.381420] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388746, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.444118] env[61998]: DEBUG nova.compute.manager [req-b36cc1a3-111b-4e98-ab37-b9aaccba93d3 req-489175ae-f66e-44a9-ab32-b6032299958d service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Received event network-vif-deleted-c0165176-8b9e-4fb8-ba3f-c8b58e45c287 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 910.444183] env[61998]: INFO nova.compute.manager [req-b36cc1a3-111b-4e98-ab37-b9aaccba93d3 req-489175ae-f66e-44a9-ab32-b6032299958d service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Neutron deleted interface c0165176-8b9e-4fb8-ba3f-c8b58e45c287; detaching it from the instance and deleting it from the info cache [ 910.444356] env[61998]: DEBUG nova.network.neutron [req-b36cc1a3-111b-4e98-ab37-b9aaccba93d3 req-489175ae-f66e-44a9-ab32-b6032299958d service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.550201] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.734s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.553326] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.012s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.554916] env[61998]: INFO nova.compute.claims [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 910.573981] env[61998]: INFO nova.scheduler.client.report [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Deleted allocations for instance 2914460e-39e5-495b-96d8-b3580d0318d6 [ 910.593354] env[61998]: DEBUG oslo_concurrency.lockutils [req-6638421f-39d2-4c9c-ad89-3a60815d63d5 req-11281db9-4c73-4059-b6af-95de06629a10 service nova] Releasing lock "refresh_cache-b9c5feec-7bfd-470e-9833-b45403195e83" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.593597] env[61998]: DEBUG nova.compute.manager [req-6638421f-39d2-4c9c-ad89-3a60815d63d5 req-11281db9-4c73-4059-b6af-95de06629a10 service nova] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Received event network-vif-deleted-1cb05d6f-fb33-4e35-a7a7-862b3b11b653 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 910.593884] env[61998]: DEBUG nova.compute.manager [req-6638421f-39d2-4c9c-ad89-3a60815d63d5 req-11281db9-4c73-4059-b6af-95de06629a10 service nova] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Received event network-vif-deleted-9852375c-4fe3-4053-89ac-5a75e475ef56 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 910.830564] env[61998]: DEBUG oslo_vmware.api [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388724, 'name': ReconfigVM_Task, 'duration_secs': 5.741033} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.830922] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.831300] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Reconfigured VM to detach interface {{(pid=61998) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 910.884401] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388746, 'name': ReconfigVM_Task, 'duration_secs': 0.256876} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.884686] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Reconfigured VM instance instance-00000052 to attach disk [datastore2] aaee1558-f98b-4006-93b6-69434c78e79c/aaee1558-f98b-4006-93b6-69434c78e79c.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 910.885358] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69491ebc-8467-4a11-959a-9ab3c742fa25 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.891208] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 910.891208] env[61998]: value = "task-1388748" [ 910.891208] env[61998]: _type = "Task" [ 910.891208] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.899631] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388748, 'name': Rename_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.922134] env[61998]: DEBUG nova.network.neutron [-] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.946986] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3aab142d-5c5d-438c-afca-2ac31c8993b8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.956190] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4eb7216-dd46-46ff-a7b6-20e95fee03f7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.980739] env[61998]: DEBUG nova.compute.manager [req-b36cc1a3-111b-4e98-ab37-b9aaccba93d3 req-489175ae-f66e-44a9-ab32-b6032299958d service nova] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Detach interface failed, port_id=c0165176-8b9e-4fb8-ba3f-c8b58e45c287, reason: Instance b9c5feec-7bfd-470e-9833-b45403195e83 could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 911.082677] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da9ad434-73cc-4b62-a547-5d3c7d97fae1 tempest-AttachInterfacesV270Test-2077099922 tempest-AttachInterfacesV270Test-2077099922-project-member] Lock "2914460e-39e5-495b-96d8-b3580d0318d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.750s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.401801] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388748, 'name': Rename_Task, 'duration_secs': 0.120764} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.402261] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 911.402619] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c58595b1-a129-45e2-b000-afd3f51077b7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.410366] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 911.410366] env[61998]: value = "task-1388749" [ 911.410366] env[61998]: _type = "Task" [ 911.410366] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.418690] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388749, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.424464] env[61998]: INFO nova.compute.manager [-] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Took 1.22 seconds to deallocate network for instance. [ 911.726171] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d1b8b7-edde-42de-b761-74ad542761be {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.734673] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c277c2d5-80c0-470a-8c31-8eafbb80f7bf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.769296] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0d6e32-b419-4356-a3f3-effcf7eb6efe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.778449] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c139314a-58ef-4396-988d-36bbccb62aa1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.794718] env[61998]: DEBUG nova.compute.provider_tree [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.921482] env[61998]: DEBUG oslo_vmware.api [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388749, 'name': PowerOnVM_Task, 'duration_secs': 0.4078} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.921768] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 911.921980] env[61998]: DEBUG nova.compute.manager [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 911.922803] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d29833f-0ac2-4c3f-aeef-5f5e3e8d5a62 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.933551] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.243573] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.243573] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.243573] env[61998]: DEBUG nova.network.neutron [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.301021] env[61998]: DEBUG nova.scheduler.client.report [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 912.303138] env[61998]: DEBUG oslo_concurrency.lockutils [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 
tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.303485] env[61998]: DEBUG oslo_concurrency.lockutils [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.303810] env[61998]: DEBUG oslo_concurrency.lockutils [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.304175] env[61998]: DEBUG oslo_concurrency.lockutils [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.304467] env[61998]: DEBUG oslo_concurrency.lockutils [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.308675] env[61998]: INFO nova.compute.manager [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Terminating instance [ 912.310555] env[61998]: DEBUG nova.compute.manager [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 912.310878] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 912.312099] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750bd01e-b678-4c5b-b008-51425efcdef8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.322663] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.322663] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bc03cb0-6fc9-474e-a8b1-940f8931e4ee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.329458] env[61998]: DEBUG oslo_vmware.api [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 912.329458] env[61998]: value = "task-1388750" [ 912.329458] env[61998]: _type = "Task" [ 912.329458] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.340713] env[61998]: DEBUG oslo_vmware.api [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388750, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.444977] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.502649] env[61998]: DEBUG nova.compute.manager [req-11f55cc0-b515-42dc-bf54-88c3a67e201e req-bbbe67cc-cb4a-4613-ac71-809d5ab94e11 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Received event network-vif-deleted-95b14cbf-3b14-4a08-a168-03339234265d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 912.502649] env[61998]: INFO nova.compute.manager [req-11f55cc0-b515-42dc-bf54-88c3a67e201e req-bbbe67cc-cb4a-4613-ac71-809d5ab94e11 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Neutron deleted interface 95b14cbf-3b14-4a08-a168-03339234265d; detaching it from the instance and deleting it from the info cache [ 912.502925] env[61998]: DEBUG nova.network.neutron [req-11f55cc0-b515-42dc-bf54-88c3a67e201e req-bbbe67cc-cb4a-4613-ac71-809d5ab94e11 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Updating instance_info_cache with network_info: [{"id": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "address": "fa:16:3e:15:ac:f5", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9f140c3-24", "ovs_interfaceid": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.809803] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.257s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.810396] env[61998]: DEBUG nova.compute.manager [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 912.813837] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.190s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.816580] env[61998]: INFO nova.compute.claims [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 912.841931] env[61998]: DEBUG oslo_vmware.api [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388750, 'name': PowerOffVM_Task, 'duration_secs': 0.178798} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.843348] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 912.843551] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 912.843840] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-efab998d-2edf-4370-8161-ac313b8ff06f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.921178] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 912.921178] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 912.921178] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Deleting the datastore file [datastore1] c51f684b-84f0-42b3-acf9-9e8317b10cb6 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 912.921178] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5fd75f04-abd2-4524-af8d-04bdeeae5934 {{(pid=61998) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.929209] env[61998]: DEBUG oslo_vmware.api [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 912.929209] env[61998]: value = "task-1388752" [ 912.929209] env[61998]: _type = "Task" [ 912.929209] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.946368] env[61998]: DEBUG oslo_vmware.api [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388752, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.007058] env[61998]: DEBUG oslo_concurrency.lockutils [req-11f55cc0-b515-42dc-bf54-88c3a67e201e req-bbbe67cc-cb4a-4613-ac71-809d5ab94e11 service nova] Acquiring lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.033312] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquiring lock "aaee1558-f98b-4006-93b6-69434c78e79c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.033617] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lock "aaee1558-f98b-4006-93b6-69434c78e79c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.033857] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquiring lock "aaee1558-f98b-4006-93b6-69434c78e79c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.034090] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lock "aaee1558-f98b-4006-93b6-69434c78e79c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.034355] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lock "aaee1558-f98b-4006-93b6-69434c78e79c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.036768] env[61998]: INFO 
nova.compute.manager [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Terminating instance [ 913.039683] env[61998]: INFO nova.network.neutron [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Port 95b14cbf-3b14-4a08-a168-03339234265d from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 913.040332] env[61998]: DEBUG nova.network.neutron [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Updating instance_info_cache with network_info: [{"id": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "address": "fa:16:3e:15:ac:f5", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9f140c3-24", "ovs_interfaceid": "e9f140c3-2474-433a-acc9-85eb29ac21cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.043615] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquiring lock "refresh_cache-aaee1558-f98b-4006-93b6-69434c78e79c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.046219] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquired lock "refresh_cache-aaee1558-f98b-4006-93b6-69434c78e79c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.046443] env[61998]: DEBUG nova.network.neutron [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.315164] env[61998]: DEBUG nova.compute.utils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 
tempest-ServerDiskConfigTestJSON-347226950-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 913.319516] env[61998]: DEBUG nova.compute.manager [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 913.319702] env[61998]: DEBUG nova.network.neutron [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 913.379768] env[61998]: DEBUG nova.policy [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f155bbfca47547c2bf745811003ffcec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f49104f21d7147328bcc8edee8d3cdb2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 913.440693] env[61998]: DEBUG oslo_vmware.api [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388752, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197588} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.440978] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 913.441189] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 913.441368] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 913.441545] env[61998]: INFO nova.compute.manager [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 913.441932] env[61998]: DEBUG oslo.service.loopingcall [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 913.442041] env[61998]: DEBUG nova.compute.manager [-] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 913.442099] env[61998]: DEBUG nova.network.neutron [-] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 913.522470] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Acquiring lock "4ca7de74-3bcb-4da0-a2e1-573584467cc9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.522743] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Lock "4ca7de74-3bcb-4da0-a2e1-573584467cc9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.522951] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Acquiring lock "4ca7de74-3bcb-4da0-a2e1-573584467cc9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.523774] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Lock "4ca7de74-3bcb-4da0-a2e1-573584467cc9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.524036] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Lock "4ca7de74-3bcb-4da0-a2e1-573584467cc9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.526711] env[61998]: INFO nova.compute.manager [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Terminating instance [ 913.529083] env[61998]: DEBUG nova.compute.manager [None req-3d7ac51b-ea72-4f91-a800-be84324e521c 
tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 913.529656] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 913.530409] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce751ed-0bb2-43e6-b4a6-e3bd7d382c8c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.543158] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 913.543158] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d8910e8-bad5-467d-9366-8853945754ff {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.547864] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.559067] env[61998]: DEBUG oslo_vmware.api [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Waiting for the task: (returnval){ [ 913.559067] env[61998]: value = "task-1388753" [ 913.559067] env[61998]: _type = "Task" [ 913.559067] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.570038] env[61998]: DEBUG oslo_vmware.api [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388753, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.594570] env[61998]: DEBUG nova.network.neutron [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.687263] env[61998]: DEBUG nova.network.neutron [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.829857] env[61998]: DEBUG nova.compute.manager [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 913.866525] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.872730] env[61998]: DEBUG nova.network.neutron [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Successfully created port: 9f5103b5-d9cc-4978-9140-901834a6af10 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 914.026110] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff009610-c5fa-43c7-ac68-2d67eabb947e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.035889] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df07891d-5436-4a3b-9026-8af49a6726bb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.073953] env[61998]: DEBUG oslo_concurrency.lockutils [None req-47045424-94d2-47c4-a51e-e41751157ac4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-c51f684b-84f0-42b3-acf9-9e8317b10cb6-95b14cbf-3b14-4a08-a168-03339234265d" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.858s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.078768] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b44c01c-9af7-47bf-a1d2-ba988331e028 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.088181] env[61998]: DEBUG oslo_vmware.api [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388753, 'name': PowerOffVM_Task, 'duration_secs': 0.239323} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.090460] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 914.090651] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 914.090963] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e5c6346-e1b8-4bd2-a2bf-4e1bede27f39 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.093649] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974def4a-b977-4206-8fd3-6c1ddf18d3f1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.111039] env[61998]: DEBUG nova.compute.provider_tree [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.190189] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Releasing lock "refresh_cache-aaee1558-f98b-4006-93b6-69434c78e79c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.190346] env[61998]: DEBUG nova.compute.manager [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 914.190525] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.191437] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2757be5b-bb26-462c-a85b-6e6dadfa2438 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.202214] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.202576] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df8c9b4a-fba1-4c0f-8c39-8c0fdd460195 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.208424] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 914.208666] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 914.208666] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Deleting the datastore file [datastore2] 4ca7de74-3bcb-4da0-a2e1-573584467cc9 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 914.209193] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-558ec1bc-0df1-48de-90ad-2275e3333887 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.212831] env[61998]: DEBUG oslo_vmware.api [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 914.212831] env[61998]: value = "task-1388755" [ 914.212831] env[61998]: _type = "Task" [ 914.212831] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.222015] env[61998]: DEBUG oslo_vmware.api [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Waiting for the task: (returnval){ [ 914.222015] env[61998]: value = "task-1388756" [ 914.222015] env[61998]: _type = "Task" [ 914.222015] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.231089] env[61998]: DEBUG oslo_vmware.api [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388755, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.234437] env[61998]: DEBUG nova.compute.manager [req-10a2346d-9fac-42f8-9274-4df777a10374 req-9d0f6a24-0179-4d49-970c-ee2f4c47f2d3 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Received event network-vif-deleted-e9f140c3-2474-433a-acc9-85eb29ac21cc {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 914.234437] env[61998]: INFO nova.compute.manager [req-10a2346d-9fac-42f8-9274-4df777a10374 req-9d0f6a24-0179-4d49-970c-ee2f4c47f2d3 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Neutron deleted interface e9f140c3-2474-433a-acc9-85eb29ac21cc; detaching it from the instance and deleting it from the info cache [ 914.234658] env[61998]: DEBUG nova.network.neutron [req-10a2346d-9fac-42f8-9274-4df777a10374 req-9d0f6a24-0179-4d49-970c-ee2f4c47f2d3 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.239015] env[61998]: DEBUG oslo_vmware.api [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388756, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.615256] env[61998]: DEBUG nova.scheduler.client.report [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 914.689906] env[61998]: DEBUG nova.network.neutron [-] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.723850] env[61998]: DEBUG oslo_vmware.api [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388755, 'name': PowerOffVM_Task, 'duration_secs': 0.199873} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.728370] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 914.728370] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 914.728370] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d5ac2fcc-3269-4083-89f3-b82ef5d1665a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.734377] env[61998]: DEBUG oslo_vmware.api [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Task: {'id': task-1388756, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173468} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.734613] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.734798] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 914.734968] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 914.735156] env[61998]: INFO nova.compute.manager [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Took 1.21 seconds to destroy the instance on the hypervisor. [ 914.735509] env[61998]: DEBUG oslo.service.loopingcall [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.735622] env[61998]: DEBUG nova.compute.manager [-] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 914.735708] env[61998]: DEBUG nova.network.neutron [-] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 914.740408] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d3467e3-5dff-4c4c-a000-fb262a47cc92 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.750242] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe81354-0f4d-452d-9d67-0906613de299 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.765076] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 914.765076] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 914.765327] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Deleting the datastore file [datastore2] aaee1558-f98b-4006-93b6-69434c78e79c {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 914.766230] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7c46f8b-be15-4923-b7cf-0d7b1f8d5f8c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.776023] env[61998]: DEBUG oslo_vmware.api [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for the task: (returnval){ [ 914.776023] env[61998]: value = "task-1388758" [ 914.776023] env[61998]: _type = "Task" [ 914.776023] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.784915] env[61998]: DEBUG nova.compute.manager [req-10a2346d-9fac-42f8-9274-4df777a10374 req-9d0f6a24-0179-4d49-970c-ee2f4c47f2d3 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Detach interface failed, port_id=e9f140c3-2474-433a-acc9-85eb29ac21cc, reason: Instance c51f684b-84f0-42b3-acf9-9e8317b10cb6 could not be found. 
{{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 914.790461] env[61998]: DEBUG oslo_vmware.api [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388758, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.838867] env[61998]: DEBUG nova.compute.manager [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 914.868890] env[61998]: DEBUG nova.virt.hardware [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 914.869187] env[61998]: DEBUG nova.virt.hardware [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 914.869362] env[61998]: DEBUG nova.virt.hardware [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 914.869563] env[61998]: DEBUG nova.virt.hardware [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 914.869721] env[61998]: DEBUG nova.virt.hardware [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 914.869887] env[61998]: DEBUG nova.virt.hardware [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 914.870118] 
env[61998]: DEBUG nova.virt.hardware [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 914.870294] env[61998]: DEBUG nova.virt.hardware [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 914.870482] env[61998]: DEBUG nova.virt.hardware [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 914.870654] env[61998]: DEBUG nova.virt.hardware [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 914.870829] env[61998]: DEBUG nova.virt.hardware [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 914.871749] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb3e647-7313-4b92-a494-2534e5cfc757 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.881136] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a4b2e5-92bd-48b5-a645-9cc9faf06860 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.013999] env[61998]: DEBUG oslo_concurrency.lockutils [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.014457] env[61998]: DEBUG oslo_concurrency.lockutils [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.120414] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.306s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.121134] env[61998]: DEBUG nova.compute.manager [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 915.124205] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.140s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.124362] env[61998]: DEBUG nova.objects.instance [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lazy-loading 'resources' on Instance uuid 23265b26-7579-4514-a172-8cf2ec124ec6 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 915.192848] env[61998]: INFO nova.compute.manager [-] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Took 1.75 seconds to deallocate network for instance. [ 915.285705] env[61998]: DEBUG oslo_vmware.api [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Task: {'id': task-1388758, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115025} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.286079] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 915.286328] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 915.286558] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 915.286805] env[61998]: INFO nova.compute.manager [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Took 1.10 seconds to destroy the instance on the hypervisor. [ 915.287077] env[61998]: DEBUG oslo.service.loopingcall [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 915.287287] env[61998]: DEBUG nova.compute.manager [-] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 915.287384] env[61998]: DEBUG nova.network.neutron [-] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 915.301870] env[61998]: DEBUG nova.network.neutron [-] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.518657] env[61998]: DEBUG nova.compute.utils [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 915.627569] env[61998]: DEBUG nova.compute.utils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 915.634803] env[61998]: DEBUG nova.compute.manager [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 915.634803] env[61998]: DEBUG nova.network.neutron [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 915.688298] env[61998]: DEBUG nova.policy [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8b17f109d724201a22264aa6ee02ca1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82b8854f80cf48628167fd6f678d7dd7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 915.704361] env[61998]: DEBUG oslo_concurrency.lockutils [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.710292] env[61998]: DEBUG nova.compute.manager [req-1627adb8-e633-4ac0-baea-1ba1651469f0 req-bb9d3740-3c38-447e-89bb-1e0c3557db7a service nova] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Received event network-vif-plugged-9f5103b5-d9cc-4978-9140-901834a6af10 {{(pid=61998) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 915.710484] env[61998]: DEBUG oslo_concurrency.lockutils [req-1627adb8-e633-4ac0-baea-1ba1651469f0 req-bb9d3740-3c38-447e-89bb-1e0c3557db7a service nova] Acquiring lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.710648] env[61998]: DEBUG oslo_concurrency.lockutils [req-1627adb8-e633-4ac0-baea-1ba1651469f0 req-bb9d3740-3c38-447e-89bb-1e0c3557db7a service nova] Lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.710838] env[61998]: DEBUG oslo_concurrency.lockutils [req-1627adb8-e633-4ac0-baea-1ba1651469f0 req-bb9d3740-3c38-447e-89bb-1e0c3557db7a service nova] Lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.710995] env[61998]: DEBUG nova.compute.manager [req-1627adb8-e633-4ac0-baea-1ba1651469f0 req-bb9d3740-3c38-447e-89bb-1e0c3557db7a service nova] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] No waiting events found dispatching network-vif-plugged-9f5103b5-d9cc-4978-9140-901834a6af10 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 915.711300] env[61998]: WARNING nova.compute.manager [req-1627adb8-e633-4ac0-baea-1ba1651469f0 req-bb9d3740-3c38-447e-89bb-1e0c3557db7a service nova] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Received unexpected event network-vif-plugged-9f5103b5-d9cc-4978-9140-901834a6af10 for instance with vm_state building and task_state spawning. 
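[editor's note] The paired "Acquiring lock ... by ..." / "Lock ... acquired ... waited 0.000s" / "Lock ... \"released\" ... held 0.000s" entries above are emitted by oslo.concurrency's lockutils wrapper. A minimal sketch of the two usages visible in this section, assuming oslo.concurrency is installed; the lock names are copied from the log, the bodies are placeholders:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # resource-tracker bookkeeping runs under the lock; the wrapper
        # logs the acquire/waited/held lines seen above
        pass

    # per-instance event lock, as in InstanceEvents.pop_instance_event
    with lockutils.lock('43ff4071-05f5-4e5c-a46d-1ca6c99809f0-events'):
        pass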
[ 915.737231] env[61998]: DEBUG nova.network.neutron [-] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.804301] env[61998]: DEBUG nova.network.neutron [-] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.813616] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec3e206-7930-447d-9155-ac77c39cca0e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.823506] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b783fa-59cc-49b0-98ec-744f7c79c768 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.828980] env[61998]: DEBUG nova.network.neutron [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Successfully updated port: 9f5103b5-d9cc-4978-9140-901834a6af10 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 915.856163] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebff119-f7e9-4662-8daf-b07424b1ba70 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.865410] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2f9372-9c6f-4ea7-b2f1-7b706409df7a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.879931] env[61998]: DEBUG nova.compute.provider_tree [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.024691] env[61998]: DEBUG oslo_concurrency.lockutils [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.063447] env[61998]: DEBUG nova.network.neutron [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Successfully created port: c38fbbcf-cd65-4aed-a882-c4536afc9be3 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 916.137509] env[61998]: DEBUG nova.compute.manager [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Start building block device mappings for instance. 
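[editor's note] The "Using /dev/sd instead of None" entries above come from device-name selection when no prefix is supplied during block-device mapping. A simplified stand-in for that next-free-name logic (not Nova's actual nova.compute.utils.get_next_device_name implementation):

    import string

    def next_device_name(used, prefix='/dev/sd'):
        # Walk /dev/sda, /dev/sdb, ... and return the first unused name.
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used:
                return candidate
        raise ValueError('no free device names')

    print(next_device_name({'/dev/sda'}))  # /dev/sdb, matching the
                                           # "Attaching volume ... to /dev/sdb" entry below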
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 916.241408] env[61998]: INFO nova.compute.manager [-] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Took 1.51 seconds to deallocate network for instance. [ 916.306312] env[61998]: INFO nova.compute.manager [-] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Took 1.02 seconds to deallocate network for instance. [ 916.331999] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.332174] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.332308] env[61998]: DEBUG nova.network.neutron [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 916.383145] env[61998]: DEBUG nova.scheduler.client.report [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 916.626978] env[61998]: DEBUG nova.compute.manager [req-a88610f3-bb7b-4751-9675-5a973f8a0f82 req-4864b857-652d-440c-a985-36858b410f64 service nova] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Received event network-vif-deleted-a1959129-1e34-4499-b312-c6580996cd63 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 916.749084] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.813828] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.867355] env[61998]: DEBUG nova.network.neutron [None 
req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.889643] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.765s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.892659] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.987s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.892834] env[61998]: DEBUG nova.objects.instance [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lazy-loading 'resources' on Instance uuid 6e71b3c4-bac7-455c-94fd-2a9bc5128132 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.916094] env[61998]: INFO nova.scheduler.client.report [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted allocations for instance 23265b26-7579-4514-a172-8cf2ec124ec6 [ 917.067295] env[61998]: DEBUG nova.network.neutron [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance_info_cache with network_info: [{"id": "9f5103b5-d9cc-4978-9140-901834a6af10", "address": "fa:16:3e:91:a2:96", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5103b5-d9", "ovs_interfaceid": "9f5103b5-d9cc-4978-9140-901834a6af10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.100318] env[61998]: DEBUG oslo_concurrency.lockutils [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 
tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.100603] env[61998]: DEBUG oslo_concurrency.lockutils [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.100867] env[61998]: INFO nova.compute.manager [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Attaching volume 427f7ffb-87ef-476e-b045-24bddd236eff to /dev/sdb [ 917.141734] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a08fd37-de98-4cfa-837b-05ba91963af7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.149896] env[61998]: DEBUG nova.compute.manager [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 917.156608] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d261f9e-23b8-49d0-86b5-2ce5b9bdcedb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.159741] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Acquiring lock "34143cac-64e9-41fd-a970-b593d1472d92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.159741] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Lock "34143cac-64e9-41fd-a970-b593d1472d92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.175219] env[61998]: DEBUG nova.virt.block_device [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Updating existing volume attachment record: 8d3a7c5c-f36a-454e-8d0b-b21326210b44 {{(pid=61998) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 917.180767] env[61998]: DEBUG nova.virt.hardware [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Getting 
desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 917.182921] env[61998]: DEBUG nova.virt.hardware [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 917.182921] env[61998]: DEBUG nova.virt.hardware [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 917.182921] env[61998]: DEBUG nova.virt.hardware [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 917.182921] env[61998]: DEBUG nova.virt.hardware [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 917.182921] env[61998]: DEBUG nova.virt.hardware [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 917.182921] env[61998]: DEBUG nova.virt.hardware [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 917.182921] env[61998]: DEBUG nova.virt.hardware [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 917.182921] env[61998]: DEBUG nova.virt.hardware [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 917.182921] env[61998]: DEBUG nova.virt.hardware 
[None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 917.182921] env[61998]: DEBUG nova.virt.hardware [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 917.184183] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4d9c7f-c421-444d-a7bc-250780124edc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.193074] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c70373-cf50-4970-b5a1-c52ced157cff {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.424065] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c3fbf4ff-6a1f-4173-92bc-94abe3be1181 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "23265b26-7579-4514-a172-8cf2ec124ec6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.485s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.538026] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9093f548-aa20-4336-8575-7edfd2ab673d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.550335] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec21659-e7af-40a6-90ad-bf09705c033a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.579269] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.579575] env[61998]: DEBUG nova.compute.manager [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Instance network_info: |[{"id": "9f5103b5-d9cc-4978-9140-901834a6af10", "address": "fa:16:3e:91:a2:96", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5103b5-d9", "ovs_interfaceid": "9f5103b5-d9cc-4978-9140-901834a6af10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 917.580529] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:a2:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f5103b5-d9cc-4978-9140-901834a6af10', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 917.588153] env[61998]: DEBUG oslo.service.loopingcall [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 917.588878] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d99ff1d-0fc7-4ca9-a0fd-9c752b33950b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.591895] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 917.591895] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f239164d-3c45-4233-8825-9fa570f5db31 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.613949] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0fd2af-9911-4e5e-a0ae-75210a4fe16a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.617950] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 917.617950] env[61998]: value = "task-1388762" [ 917.617950] env[61998]: _type = "Task" [ 917.617950] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.628414] env[61998]: DEBUG nova.compute.provider_tree [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.635172] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388762, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.661974] env[61998]: DEBUG nova.compute.manager [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 917.779152] env[61998]: DEBUG nova.compute.manager [req-5470578d-11ca-42f4-8c11-d34572c7b3b0 req-22e7ff04-c6d8-4a8a-8084-c54b0a943ce3 service nova] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Received event network-changed-9f5103b5-d9cc-4978-9140-901834a6af10 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 917.779387] env[61998]: DEBUG nova.compute.manager [req-5470578d-11ca-42f4-8c11-d34572c7b3b0 req-22e7ff04-c6d8-4a8a-8084-c54b0a943ce3 service nova] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Refreshing instance network info cache due to event network-changed-9f5103b5-d9cc-4978-9140-901834a6af10. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 917.779619] env[61998]: DEBUG oslo_concurrency.lockutils [req-5470578d-11ca-42f4-8c11-d34572c7b3b0 req-22e7ff04-c6d8-4a8a-8084-c54b0a943ce3 service nova] Acquiring lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.779711] env[61998]: DEBUG oslo_concurrency.lockutils [req-5470578d-11ca-42f4-8c11-d34572c7b3b0 req-22e7ff04-c6d8-4a8a-8084-c54b0a943ce3 service nova] Acquired lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.779874] env[61998]: DEBUG nova.network.neutron [req-5470578d-11ca-42f4-8c11-d34572c7b3b0 req-22e7ff04-c6d8-4a8a-8084-c54b0a943ce3 service nova] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Refreshing network info cache for port 9f5103b5-d9cc-4978-9140-901834a6af10 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 917.891405] env[61998]: DEBUG nova.network.neutron [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Successfully updated port: c38fbbcf-cd65-4aed-a882-c4536afc9be3 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 918.131371] env[61998]: DEBUG nova.scheduler.client.report [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 918.134700] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388762, 'name': CreateVM_Task, 'duration_secs': 0.317456} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.135231] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 918.136060] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.136358] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.136796] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 918.137157] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4625713d-7d5a-488d-85da-d60bdf0906d9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.142764] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 918.142764] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]524039aa-14dc-969f-f5c1-cf14acd22401" [ 918.142764] env[61998]: _type = "Task" [ 918.142764] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.152030] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]524039aa-14dc-969f-f5c1-cf14acd22401, 'name': SearchDatastore_Task} progress is 0%. 
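[editor's note] Each "Invoking ..._Task" / "Task: {...} progress is N%" / "completed successfully" sequence above is oslo.vmware issuing a vSphere task and polling it to completion. A minimal sketch under assumed credentials; the host, user, password, and helper name are placeholders, while the oslo_vmware session calls are the real API:

    from oslo_vmware import api

    def delete_datastore_file(ds_path, datacenter_ref):
        # Placeholder vCenter endpoint and credentials.
        session = api.VMwareAPISession('vc.example.org', 'admin', 'secret',
                                       api_retry_count=2,
                                       task_poll_interval=0.5)
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=datacenter_ref)
        # wait_for_task polls the task (producing the progress entries)
        # and returns the task info on success or raises on error.
        return session.wait_for_task(task)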
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.190095] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.394506] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "refresh_cache-93df4e9a-29d2-4551-9bda-58b02163c116" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.394750] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "refresh_cache-93df4e9a-29d2-4551-9bda-58b02163c116" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.394750] env[61998]: DEBUG nova.network.neutron [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 918.482196] env[61998]: DEBUG nova.network.neutron [req-5470578d-11ca-42f4-8c11-d34572c7b3b0 req-22e7ff04-c6d8-4a8a-8084-c54b0a943ce3 service nova] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updated VIF entry in instance network info cache for port 9f5103b5-d9cc-4978-9140-901834a6af10. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 918.482588] env[61998]: DEBUG nova.network.neutron [req-5470578d-11ca-42f4-8c11-d34572c7b3b0 req-22e7ff04-c6d8-4a8a-8084-c54b0a943ce3 service nova] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance_info_cache with network_info: [{"id": "9f5103b5-d9cc-4978-9140-901834a6af10", "address": "fa:16:3e:91:a2:96", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5103b5-d9", "ovs_interfaceid": "9f5103b5-d9cc-4978-9140-901834a6af10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.636155] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.744s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.638439] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.705s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.638685] env[61998]: DEBUG nova.objects.instance [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Lazy-loading 'resources' on Instance uuid b9c5feec-7bfd-470e-9833-b45403195e83 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.654542] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]524039aa-14dc-969f-f5c1-cf14acd22401, 'name': SearchDatastore_Task, 'duration_secs': 0.010841} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.655468] env[61998]: INFO nova.scheduler.client.report [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted allocations for instance 6e71b3c4-bac7-455c-94fd-2a9bc5128132 [ 918.656418] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.657930] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 918.657930] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.657930] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.657930] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 918.659524] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b8929d2-53fa-4295-8ad4-40934c8646de {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.663389] env[61998]: DEBUG nova.compute.manager [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Received event network-vif-plugged-c38fbbcf-cd65-4aed-a882-c4536afc9be3 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 918.663591] env[61998]: DEBUG oslo_concurrency.lockutils [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] Acquiring lock "93df4e9a-29d2-4551-9bda-58b02163c116-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.663786] env[61998]: DEBUG oslo_concurrency.lockutils [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d 
req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] Lock "93df4e9a-29d2-4551-9bda-58b02163c116-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.663952] env[61998]: DEBUG oslo_concurrency.lockutils [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] Lock "93df4e9a-29d2-4551-9bda-58b02163c116-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.664136] env[61998]: DEBUG nova.compute.manager [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] No waiting events found dispatching network-vif-plugged-c38fbbcf-cd65-4aed-a882-c4536afc9be3 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 918.664303] env[61998]: WARNING nova.compute.manager [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Received unexpected event network-vif-plugged-c38fbbcf-cd65-4aed-a882-c4536afc9be3 for instance with vm_state building and task_state spawning. [ 918.664463] env[61998]: DEBUG nova.compute.manager [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Received event network-changed-c38fbbcf-cd65-4aed-a882-c4536afc9be3 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 918.664617] env[61998]: DEBUG nova.compute.manager [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Refreshing instance network info cache due to event network-changed-c38fbbcf-cd65-4aed-a882-c4536afc9be3. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 918.664786] env[61998]: DEBUG oslo_concurrency.lockutils [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] Acquiring lock "refresh_cache-93df4e9a-29d2-4551-9bda-58b02163c116" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.672432] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 918.672618] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Folder [datastore1] devstack-image-cache_base created. 
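[editor's note] The "Acquiring lock [datastore1] devstack-image-cache_base/..." and "Processing image ..." entries above guard a fetch-if-missing image cache. A hypothetical sketch of that serialize-then-check pattern, assuming oslo.concurrency; the helper name and download callback are illustrative:

    import os
    from oslo_concurrency import lockutils

    def fetch_image_if_missing(cache_path, download):
        # Serialize on the cache path so only one worker fetches; a later
        # worker finds the file already present and skips the download.
        with lockutils.lock(cache_path):
            if not os.path.exists(cache_path):
                download(cache_path)
            return cache_path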
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 918.673338] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9a06aab-d9ef-49bd-81d9-1d483aec83e4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.678837] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 918.678837] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]526ae2cc-b052-8c46-26eb-943f6cdcb986" [ 918.678837] env[61998]: _type = "Task" [ 918.678837] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.687372] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526ae2cc-b052-8c46-26eb-943f6cdcb986, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.942492] env[61998]: DEBUG nova.network.neutron [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 918.985019] env[61998]: DEBUG oslo_concurrency.lockutils [req-5470578d-11ca-42f4-8c11-d34572c7b3b0 req-22e7ff04-c6d8-4a8a-8084-c54b0a943ce3 service nova] Releasing lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.165956] env[61998]: DEBUG nova.network.neutron [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Updating instance_info_cache with network_info: [{"id": "c38fbbcf-cd65-4aed-a882-c4536afc9be3", "address": "fa:16:3e:6f:e7:8a", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38fbbcf-cd", "ovs_interfaceid": "c38fbbcf-cd65-4aed-a882-c4536afc9be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.167381] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7e498119-5866-4fe6-9023-97ad0c6d885a tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "6e71b3c4-bac7-455c-94fd-2a9bc5128132" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 12.297s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.193270] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526ae2cc-b052-8c46-26eb-943f6cdcb986, 'name': SearchDatastore_Task, 'duration_secs': 0.009858} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.197065] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3024657-8280-408d-8150-4ae2b39aad6e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.206410] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 919.206410] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]522b8074-0fdd-d63d-dfb6-db81517b8e1e" [ 919.206410] env[61998]: _type = "Task" [ 919.206410] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.215301] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]522b8074-0fdd-d63d-dfb6-db81517b8e1e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.290801] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36540b4-1a2e-4003-a2f7-ad76fb351f32 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.300177] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d7b551-ebc5-41ed-8180-1842d958a7a3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.334688] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710339be-9c32-4a35-81e1-af4242445ee4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.343159] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff8ffac-dcf0-478b-9aee-5d22c90d7c4b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.357189] env[61998]: DEBUG nova.compute.provider_tree [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.669942] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "refresh_cache-93df4e9a-29d2-4551-9bda-58b02163c116" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.670353] env[61998]: DEBUG nova.compute.manager [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Instance network_info: |[{"id": "c38fbbcf-cd65-4aed-a882-c4536afc9be3", "address": "fa:16:3e:6f:e7:8a", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38fbbcf-cd", "ovs_interfaceid": "c38fbbcf-cd65-4aed-a882-c4536afc9be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 919.670949] env[61998]: DEBUG 
oslo_concurrency.lockutils [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] Acquired lock "refresh_cache-93df4e9a-29d2-4551-9bda-58b02163c116" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.671168] env[61998]: DEBUG nova.network.neutron [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Refreshing network info cache for port c38fbbcf-cd65-4aed-a882-c4536afc9be3 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 919.672389] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:e7:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bec903a9-d773-4d7c-a80c-c2533be346fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c38fbbcf-cd65-4aed-a882-c4536afc9be3', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 919.680106] env[61998]: DEBUG oslo.service.loopingcall [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 919.683074] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 919.684015] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ce00647-a9b1-48ec-b5df-b37e70b3f7df {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.704786] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 919.704786] env[61998]: value = "task-1388764" [ 919.704786] env[61998]: _type = "Task" [ 919.704786] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.715604] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388764, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.719078] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]522b8074-0fdd-d63d-dfb6-db81517b8e1e, 'name': SearchDatastore_Task, 'duration_secs': 0.010043} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.719637] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.719637] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 43ff4071-05f5-4e5c-a46d-1ca6c99809f0/43ff4071-05f5-4e5c-a46d-1ca6c99809f0.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 919.719855] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66d18d4e-c459-46cb-bcfb-2b9f583fa3aa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.727291] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 919.727291] env[61998]: value = "task-1388765" [ 919.727291] env[61998]: _type = "Task" [ 919.727291] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.739367] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388765, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.861809] env[61998]: DEBUG nova.scheduler.client.report [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 919.887473] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "a67aa33f-c7ba-44da-bdfa-e0a53a8538ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.887784] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a67aa33f-c7ba-44da-bdfa-e0a53a8538ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.929213] env[61998]: DEBUG nova.network.neutron [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Updated VIF entry in instance network info cache for port c38fbbcf-cd65-4aed-a882-c4536afc9be3. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 919.929613] env[61998]: DEBUG nova.network.neutron [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Updating instance_info_cache with network_info: [{"id": "c38fbbcf-cd65-4aed-a882-c4536afc9be3", "address": "fa:16:3e:6f:e7:8a", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38fbbcf-cd", "ovs_interfaceid": "c38fbbcf-cd65-4aed-a882-c4536afc9be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.216014] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388764, 'name': CreateVM_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.238712] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388765, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485986} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.238983] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 43ff4071-05f5-4e5c-a46d-1ca6c99809f0/43ff4071-05f5-4e5c-a46d-1ca6c99809f0.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 920.239219] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 920.239471] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca253927-cac8-4a8e-88b7-f55c1f2b8452 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.246449] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 920.246449] env[61998]: value = "task-1388766" [ 920.246449] env[61998]: _type = "Task" [ 920.246449] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.257024] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388766, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.370758] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.732s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.374277] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.929s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.374277] env[61998]: DEBUG nova.objects.instance [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61998) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 920.392760] env[61998]: DEBUG nova.compute.manager [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 920.396017] env[61998]: INFO nova.scheduler.client.report [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Deleted allocations for instance b9c5feec-7bfd-470e-9833-b45403195e83 [ 920.433434] env[61998]: DEBUG oslo_concurrency.lockutils [req-7bdd68ce-eccf-4229-ad49-85681cbdde0d req-87b8a10f-f04e-4b50-8da6-2547356242d8 service nova] Releasing lock "refresh_cache-93df4e9a-29d2-4551-9bda-58b02163c116" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.715974] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388764, 'name': CreateVM_Task, 'duration_secs': 0.671127} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.716195] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 920.716853] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.717034] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.717362] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 920.717610] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34c7c8dd-9a07-438e-a312-56ee4565db11 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.722130] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 920.722130] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]523cbb29-e876-1ac7-3843-2d5117344a3a" [ 920.722130] env[61998]: _type = "Task" [ 920.722130] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.729752] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523cbb29-e876-1ac7-3843-2d5117344a3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.755268] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388766, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068023} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.755564] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 920.756378] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4aa3246-1f88-4850-af7a-cd6e245b2575 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.778659] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 43ff4071-05f5-4e5c-a46d-1ca6c99809f0/43ff4071-05f5-4e5c-a46d-1ca6c99809f0.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.778917] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffce007d-af3e-4aed-a115-ca733aa7577d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.798733] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 920.798733] env[61998]: value = "task-1388767" [ 920.798733] env[61998]: _type = "Task" [ 920.798733] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.806022] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388767, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.905504] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c195146e-3991-4b01-9255-89513c939225 tempest-ServerRescueTestJSONUnderV235-956665415 tempest-ServerRescueTestJSONUnderV235-956665415-project-member] Lock "b9c5feec-7bfd-470e-9833-b45403195e83" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 11.962s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.911622] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.233461] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523cbb29-e876-1ac7-3843-2d5117344a3a, 'name': SearchDatastore_Task, 'duration_secs': 0.009166} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.233918] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.234167] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 921.234419] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.234572] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.234766] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 921.235075] env[61998]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-92b886e2-fb08-4cb4-9c3d-68c3956a6195 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.243822] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 921.243996] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 921.244724] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2263ba4-a4fd-4605-b44e-3a03a74e9f35 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.250162] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 921.250162] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5208d55e-34b3-9762-2737-9ff0832b2e67" [ 921.250162] env[61998]: _type = "Task" [ 921.250162] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.259040] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5208d55e-34b3-9762-2737-9ff0832b2e67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.309121] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388767, 'name': ReconfigVM_Task, 'duration_secs': 0.262759} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.309436] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 43ff4071-05f5-4e5c-a46d-1ca6c99809f0/43ff4071-05f5-4e5c-a46d-1ca6c99809f0.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.310088] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c71a0c47-64e8-4e3e-8cb4-5b99d958aaed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.316548] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 921.316548] env[61998]: value = "task-1388768" [ 921.316548] env[61998]: _type = "Task" [ 921.316548] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.325584] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388768, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.384434] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d2b649a8-e20e-403c-b705-acf791fd2ee8 tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.385953] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.519s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.385953] env[61998]: DEBUG nova.objects.instance [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lazy-loading 'pci_requests' on Instance uuid 2d0b199f-e0f1-42e0-afb5-e08602aebf01 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.727415] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Volume attach. 
Driver type: vmdk {{(pid=61998) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 921.727667] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294762', 'volume_id': '427f7ffb-87ef-476e-b045-24bddd236eff', 'name': 'volume-427f7ffb-87ef-476e-b045-24bddd236eff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e', 'attached_at': '', 'detached_at': '', 'volume_id': '427f7ffb-87ef-476e-b045-24bddd236eff', 'serial': '427f7ffb-87ef-476e-b045-24bddd236eff'} {{(pid=61998) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 921.728594] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b8abf3-5e0b-4094-aa90-13543c05da53 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.745922] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e7b0c3-1054-4895-8f18-cde411d02c31 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.770565] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] volume-427f7ffb-87ef-476e-b045-24bddd236eff/volume-427f7ffb-87ef-476e-b045-24bddd236eff.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 921.773542] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-166f0d83-25be-4327-a639-8adda0c53d2f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.796785] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5208d55e-34b3-9762-2737-9ff0832b2e67, 'name': SearchDatastore_Task, 'duration_secs': 0.017436} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.798591] env[61998]: DEBUG oslo_vmware.api [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 921.798591] env[61998]: value = "task-1388769" [ 921.798591] env[61998]: _type = "Task" [ 921.798591] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.798793] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff0b3001-db70-417a-86d5-fd91bce52628 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.809772] env[61998]: DEBUG oslo_vmware.api [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388769, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.810083] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 921.810083] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]525df269-d6d3-350c-2d79-a2d2ba87609c" [ 921.810083] env[61998]: _type = "Task" [ 921.810083] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.819070] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]525df269-d6d3-350c-2d79-a2d2ba87609c, 'name': SearchDatastore_Task, 'duration_secs': 0.009516} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.823306] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.823679] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 93df4e9a-29d2-4551-9bda-58b02163c116/93df4e9a-29d2-4551-9bda-58b02163c116.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 921.823994] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ddddb71-928f-4357-a3d0-731f123e2346 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.831192] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388768, 'name': Rename_Task, 'duration_secs': 0.158908} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.832393] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 921.832704] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 921.832704] env[61998]: value = "task-1388770" [ 921.832704] env[61998]: _type = "Task" [ 921.832704] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.832918] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-966321cc-32cd-49af-96e5-f3b29e5bb321 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.842433] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388770, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.843673] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 921.843673] env[61998]: value = "task-1388771" [ 921.843673] env[61998]: _type = "Task" [ 921.843673] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.851571] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388771, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.890166] env[61998]: DEBUG nova.objects.instance [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lazy-loading 'numa_topology' on Instance uuid 2d0b199f-e0f1-42e0-afb5-e08602aebf01 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.316395] env[61998]: DEBUG oslo_vmware.api [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388769, 'name': ReconfigVM_Task, 'duration_secs': 0.455594} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.316740] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Reconfigured VM instance instance-0000004c to attach disk [datastore1] volume-427f7ffb-87ef-476e-b045-24bddd236eff/volume-427f7ffb-87ef-476e-b045-24bddd236eff.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 922.323604] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-840d3ef2-f319-4451-90e7-8ef912b850c0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.340127] env[61998]: DEBUG oslo_vmware.api [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 922.340127] env[61998]: value = "task-1388772" [ 922.340127] env[61998]: _type = "Task" [ 922.340127] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.343837] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388770, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479647} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.346697] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 93df4e9a-29d2-4551-9bda-58b02163c116/93df4e9a-29d2-4551-9bda-58b02163c116.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 922.350032] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 922.350032] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ecef1b23-b088-42ac-adcf-b788d6deb80c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.361724] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388771, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.362550] env[61998]: DEBUG oslo_vmware.api [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388772, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.363402] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 922.363402] env[61998]: value = "task-1388773" [ 922.363402] env[61998]: _type = "Task" [ 922.363402] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.374709] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388773, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.392545] env[61998]: INFO nova.compute.claims [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 922.858026] env[61998]: DEBUG oslo_vmware.api [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388772, 'name': ReconfigVM_Task, 'duration_secs': 0.192546} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.858519] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294762', 'volume_id': '427f7ffb-87ef-476e-b045-24bddd236eff', 'name': 'volume-427f7ffb-87ef-476e-b045-24bddd236eff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e', 'attached_at': '', 'detached_at': '', 'volume_id': '427f7ffb-87ef-476e-b045-24bddd236eff', 'serial': '427f7ffb-87ef-476e-b045-24bddd236eff'} {{(pid=61998) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 922.863101] env[61998]: DEBUG oslo_vmware.api [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388771, 'name': PowerOnVM_Task, 'duration_secs': 0.626747} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.863610] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.863823] env[61998]: INFO nova.compute.manager [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Took 8.02 seconds to spawn the instance on the hypervisor. [ 922.864018] env[61998]: DEBUG nova.compute.manager [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 922.864852] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e22071-73bb-451e-9339-38e1881f8c20 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.878742] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388773, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106007} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.880803] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 922.881794] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08e0fb8-df28-4307-9ac7-9fed49d123f9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.909416] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 93df4e9a-29d2-4551-9bda-58b02163c116/93df4e9a-29d2-4551-9bda-58b02163c116.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 922.910639] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f3b531c-c367-41ce-880b-6341dad0b377 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.933090] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 922.933090] env[61998]: value = "task-1388774" [ 922.933090] env[61998]: _type = "Task" [ 922.933090] 
env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.942674] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388774, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.388082] env[61998]: INFO nova.compute.manager [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Took 15.87 seconds to build instance. [ 923.443969] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388774, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.525272] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f83080-f112-494f-8334-d16ac4cce58e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.532957] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91db8b8f-eea8-4147-8c60-e0d67230d2a0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.562926] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080c22a9-1e9f-4f41-82e1-e23fdc9821e8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.570195] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c5db25-9d45-45c4-a6d4-3de1fb53ff24 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.583123] env[61998]: DEBUG nova.compute.provider_tree [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.890258] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eac39dd2-787b-4f2e-9172-50a372a3ec28 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.382s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.908760] env[61998]: DEBUG nova.objects.instance [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lazy-loading 'flavor' on Instance uuid 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.945653] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 
tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388774, 'name': ReconfigVM_Task, 'duration_secs': 0.868113} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.945981] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 93df4e9a-29d2-4551-9bda-58b02163c116/93df4e9a-29d2-4551-9bda-58b02163c116.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 923.947450] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f3d24704-c98d-450d-bdc6-2bfb9056bdc7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.957584] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 923.957584] env[61998]: value = "task-1388775" [ 923.957584] env[61998]: _type = "Task" [ 923.957584] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.966107] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388775, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.086080] env[61998]: DEBUG nova.scheduler.client.report [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 924.414055] env[61998]: DEBUG oslo_concurrency.lockutils [None req-820da632-815d-4456-9685-c85f6551d975 tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.313s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.467870] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388775, 'name': Rename_Task, 'duration_secs': 0.16695} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.468179] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 924.468536] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d36daff3-8fd3-4b98-af4f-558bf407bda3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.474971] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 924.474971] env[61998]: value = "task-1388776" [ 924.474971] env[61998]: _type = "Task" [ 924.474971] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.484502] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388776, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.590732] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.205s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.593212] env[61998]: DEBUG oslo_concurrency.lockutils [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.889s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.593525] env[61998]: DEBUG nova.objects.instance [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'resources' on Instance uuid c51f684b-84f0-42b3-acf9-9e8317b10cb6 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.641568] env[61998]: INFO nova.network.neutron [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updating port da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 924.985619] env[61998]: DEBUG oslo_vmware.api [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388776, 'name': PowerOnVM_Task, 'duration_secs': 0.474063} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.985862] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 924.986131] env[61998]: INFO nova.compute.manager [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Took 7.84 seconds to spawn the instance on the hypervisor. [ 924.986321] env[61998]: DEBUG nova.compute.manager [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 924.987107] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79bd487-9b09-4e28-b3dd-f15546541007 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.239869] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f78efea-85c7-4d9d-bebe-dd34a69bb745 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.248030] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66166a57-6b14-412a-b0bc-ef949df74539 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.253647] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.253913] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.280773] env[61998]: DEBUG nova.compute.utils [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 925.282343] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999a571f-3a0a-4c02-8409-ed1cd5c55d93 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.292990] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82464768-5388-4df5-b256-c930edf36226 
{{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.306888] env[61998]: DEBUG nova.compute.provider_tree [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.505398] env[61998]: INFO nova.compute.manager [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Took 16.90 seconds to build instance. [ 925.677679] env[61998]: DEBUG nova.compute.manager [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Stashing vm_state: active {{(pid=61998) _prep_resize /opt/stack/nova/nova/compute/manager.py:5923}} [ 925.786452] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 0.532s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.809575] env[61998]: DEBUG nova.scheduler.client.report [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 926.007608] env[61998]: DEBUG oslo_concurrency.lockutils [None req-da1f26d0-cede-48ac-b197-09725de3dfe8 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "93df4e9a-29d2-4551-9bda-58b02163c116" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.420s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.103231] env[61998]: DEBUG nova.compute.manager [req-4ba1356e-9c18-484f-a37b-2534ed398771 req-a2aafb4f-da11-40f5-ac8c-b9e447883805 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Received event network-vif-plugged-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 926.103231] env[61998]: DEBUG oslo_concurrency.lockutils [req-4ba1356e-9c18-484f-a37b-2534ed398771 req-a2aafb4f-da11-40f5-ac8c-b9e447883805 service nova] Acquiring lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.103384] env[61998]: DEBUG oslo_concurrency.lockutils [req-4ba1356e-9c18-484f-a37b-2534ed398771 req-a2aafb4f-da11-40f5-ac8c-b9e447883805 service nova] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.103597] env[61998]: DEBUG oslo_concurrency.lockutils [req-4ba1356e-9c18-484f-a37b-2534ed398771 req-a2aafb4f-da11-40f5-ac8c-b9e447883805 service nova] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.103843] env[61998]: DEBUG nova.compute.manager [req-4ba1356e-9c18-484f-a37b-2534ed398771 req-a2aafb4f-da11-40f5-ac8c-b9e447883805 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] No waiting events found dispatching network-vif-plugged-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 926.104118] env[61998]: WARNING nova.compute.manager [req-4ba1356e-9c18-484f-a37b-2534ed398771 req-a2aafb4f-da11-40f5-ac8c-b9e447883805 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Received unexpected event network-vif-plugged-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 for instance with vm_state shelved_offloaded and task_state spawning. [ 926.195471] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.195688] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.195868] env[61998]: DEBUG nova.network.neutron [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 926.199725] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.314595] env[61998]: DEBUG oslo_concurrency.lockutils [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.318685] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.569s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.318685] env[61998]: DEBUG nova.objects.instance [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Lazy-loading 'resources' on Instance uuid 4ca7de74-3bcb-4da0-a2e1-573584467cc9 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.338846] env[61998]: INFO nova.scheduler.client.report [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Deleted allocations for instance c51f684b-84f0-42b3-acf9-9e8317b10cb6 [ 926.582675] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bab92a3-0781-425e-9de4-2d520cea0966 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "93df4e9a-29d2-4551-9bda-58b02163c116" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.583299] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bab92a3-0781-425e-9de4-2d520cea0966 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "93df4e9a-29d2-4551-9bda-58b02163c116" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.583497] env[61998]: DEBUG nova.compute.manager [None req-0bab92a3-0781-425e-9de4-2d520cea0966 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 926.584403] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31e60ab-377d-481b-af7b-4ff81a499b05 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.591528] env[61998]: DEBUG nova.compute.manager [None req-0bab92a3-0781-425e-9de4-2d520cea0966 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61998) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3378}} [ 926.592160] env[61998]: DEBUG nova.objects.instance [None req-0bab92a3-0781-425e-9de4-2d520cea0966 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lazy-loading 'flavor' on Instance uuid 93df4e9a-29d2-4551-9bda-58b02163c116 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.826399] env[61998]: DEBUG oslo_concurrency.lockutils [None 
req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.826677] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.826915] env[61998]: INFO nova.compute.manager [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Attaching volume 740a704c-fcec-455a-84c5-cd429ec2a7be to /dev/sdc [ 926.849209] env[61998]: DEBUG oslo_concurrency.lockutils [None req-db4a4cad-582d-47be-ab10-904e46e8c8e7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.546s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.853398] env[61998]: DEBUG oslo_concurrency.lockutils [req-11f55cc0-b515-42dc-bf54-88c3a67e201e req-bbbe67cc-cb4a-4613-ac71-809d5ab94e11 service nova] Acquired lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.853398] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec378aa-6179-4cd0-a857-70697558ba6d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.860666] env[61998]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
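
vSphere reports SOAP faults inside an HTTP 200 response, which is why suds logs the warning above before oslo.vmware turns the fault list that follows into a typed exception; Nova's VMware driver then maps a moref that has vanished on the vCenter side onto its own InstanceNotFound, exactly the translation visible in the ERROR record below. A hedged sketch of that mapping, where `session` is an oslo.vmware VMwareAPISession and `vm_ref` a possibly-deleted VirtualMachine moref, both assumed rather than built here:

    # Sketch of translating a stale moref into Nova's InstanceNotFound.
    # `session` (VMwareAPISession) and `vm_ref` are assumed, not built here.
    from oslo_vmware import exceptions as vexc
    from oslo_vmware import vim_util

    from nova import exception

    def get_power_state(session, vm_ref, instance_uuid):
        try:
            return session.invoke_api(vim_util, 'get_object_property',
                                      session.vim, vm_ref,
                                      'runtime.powerState')
        except vexc.ManagedObjectNotFoundException:
            # ManagedObjectNotFound means the VM was deleted (or never fully
            # created) on the vCenter side; callers expect an instance-level
            # error, so surface it as InstanceNotFound instead.
            raise exception.InstanceNotFound(instance_id=instance_uuid)
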
[ 926.860784] env[61998]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=61998) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 926.864286] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e40f1969-6cfe-44b6-afc6-7e08c8a94481 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.867971] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e54c4e8-d696-45bf-8629-0e562afc3a0a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.876106] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9435fa98-bbda-414d-8803-3f0b305fc1fc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.883386] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0140a0fe-ff49-4447-b94b-ed2d24cf099d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.905659] env[61998]: DEBUG nova.virt.block_device [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Updating existing volume attachment record: c4fc2632-a569-4de5-a97c-1da84185de1e {{(pid=61998) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 926.916946] env[61998]: ERROR root [req-11f55cc0-b515-42dc-bf54-88c3a67e201e req-bbbe67cc-cb4a-4613-ac71-809d5ab94e11 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-294712' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', 
"oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-294712' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-294712' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-294712'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-294712' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-294712' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-294712'}\n"]: nova.exception.InstanceNotFound: Instance c51f684b-84f0-42b3-acf9-9e8317b10cb6 could not be found. [ 926.917173] env[61998]: DEBUG oslo_concurrency.lockutils [req-11f55cc0-b515-42dc-bf54-88c3a67e201e req-bbbe67cc-cb4a-4613-ac71-809d5ab94e11 service nova] Releasing lock "c51f684b-84f0-42b3-acf9-9e8317b10cb6" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.917403] env[61998]: DEBUG nova.compute.manager [req-11f55cc0-b515-42dc-bf54-88c3a67e201e req-bbbe67cc-cb4a-4613-ac71-809d5ab94e11 service nova] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Detach interface failed, port_id=95b14cbf-3b14-4a08-a168-03339234265d, reason: Instance c51f684b-84f0-42b3-acf9-9e8317b10cb6 could not be found. 
{{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 926.932960] env[61998]: DEBUG nova.network.neutron [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updating instance_info_cache with network_info: [{"id": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "address": "fa:16:3e:95:c1:87", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2ff8f0-d7", "ovs_interfaceid": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.996542] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e52e63-50b8-4b93-9dd9-a1c13e6793ac {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.003732] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd44903-eb9f-4abd-8667-7c3e64d6961f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.035302] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d86c8b-efa0-4d93-9db9-2aa5c6770136 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.044586] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc2ae63-4b3e-4d60-ab90-5d4a6c220e61 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.062303] env[61998]: DEBUG nova.compute.provider_tree [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.436447] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.468394] env[61998]: DEBUG nova.virt.hardware [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='abd964a5908704d381285190aaaddbfc',container_format='bare',created_at=2024-10-31T11:55:46Z,direct_url=,disk_format='vmdk',id=45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-2000970641-shelved',owner='5dc1064c95484fd4afd1de8243b72d55',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2024-10-31T11:56:01Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 927.468678] env[61998]: DEBUG nova.virt.hardware [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 927.468855] env[61998]: DEBUG nova.virt.hardware [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 927.469111] env[61998]: DEBUG nova.virt.hardware [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 927.469275] env[61998]: DEBUG nova.virt.hardware [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 927.469429] env[61998]: DEBUG nova.virt.hardware [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 927.469661] env[61998]: DEBUG nova.virt.hardware [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 927.469924] env[61998]: DEBUG nova.virt.hardware [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 927.470220] env[61998]: DEBUG nova.virt.hardware [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 927.470479] env[61998]: DEBUG nova.virt.hardware [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 927.470756] env[61998]: DEBUG nova.virt.hardware [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
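
The "Build topologies ... Got 1 possible topologies ... Sorted desired topologies" sequence above is Nova enumerating the (sockets, cores, threads) combinations that can supply the flavor's vCPU count within the flavor/image maximums, then sorting the candidates by preference; with one vCPU and effectively unbounded limits the only candidate is 1:1:1. An illustrative re-implementation of the enumeration step only, not Nova's actual code (which lives in nova/virt/hardware.py and also applies preference ordering):

    # Illustrative sketch of the enumeration the records above describe;
    # Nova's real version additionally sorts by flavor/image preferences.
    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        yield (sockets, cores, threads)

    # One vCPU under 65536/65536/65536 limits -> [(1, 1, 1)], matching the
    # "Got 1 possible topologies" record above.
    print(list(possible_topologies(1, 65536, 65536, 65536)))
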
[ 927.471847] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09903cb-aab2-4d41-8727-46ea7e3b747f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.480445] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb892a1f-1363-44e3-86ce-974ac07c6f48 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.494252] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:c1:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98e21102-8954-4f6f-b1e6-5d764a53aa22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 927.502367] env[61998]: DEBUG oslo.service.loopingcall [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.502636] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 927.502855] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1a516b0-37ee-4a0d-8996-b8d6b1e63c43 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.522919] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 927.522919] env[61998]: value = "task-1388778" [ 927.522919] env[61998]: _type = "Task" [ 927.522919] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.532505] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388778, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.561172] env[61998]: DEBUG nova.scheduler.client.report [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 927.600211] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bab92a3-0781-425e-9de4-2d520cea0966 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.600211] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f6a291b-d359-4ccf-96b2-0fad9ad62afd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.606745] env[61998]: DEBUG oslo_vmware.api [None req-0bab92a3-0781-425e-9de4-2d520cea0966 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 927.606745] env[61998]: value = "task-1388779" [ 927.606745] env[61998]: _type = "Task" [ 927.606745] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.615400] env[61998]: DEBUG oslo_vmware.api [None req-0bab92a3-0781-425e-9de4-2d520cea0966 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388779, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.033085] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388778, 'name': CreateVM_Task} progress is 99%.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.067226] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.749s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.069491] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.256s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.069732] env[61998]: DEBUG nova.objects.instance [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lazy-loading 'resources' on Instance uuid aaee1558-f98b-4006-93b6-69434c78e79c {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 928.083016] env[61998]: INFO nova.scheduler.client.report [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Deleted allocations for instance 4ca7de74-3bcb-4da0-a2e1-573584467cc9
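
The "acquired ... :: waited Ns" and ""released" ... :: held Ns" pairs above (11.256 s waited, 1.749 s held for "compute_resources") are emitted by oslo.concurrency's synchronized wrapper, which serializes callers on a named lock and logs each caller's wait and hold times at DEBUG. A minimal sketch of the same pattern; the lock name matches the log, but the function body is purely illustrative, not Nova's ResourceTracker code:

    # Minimal sketch of the named-lock pattern behind the lockutils records.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs with the process-local "compute_resources" lock held;
        # concurrent callers block, and lockutils logs the waited/held
        # durations exactly like the records above.
        pass

    # Equivalent context-manager form:
    with lockutils.lock('compute_resources'):
        pass
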
[ 928.117036] env[61998]: DEBUG oslo_vmware.api [None req-0bab92a3-0781-425e-9de4-2d520cea0966 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388779, 'name': PowerOffVM_Task, 'duration_secs': 0.199} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.117274] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bab92a3-0781-425e-9de4-2d520cea0966 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 928.117496] env[61998]: DEBUG nova.compute.manager [None req-0bab92a3-0781-425e-9de4-2d520cea0966 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 928.118393] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4459b16-70b5-4ef3-8a8e-9a13489211ba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.129789] env[61998]: DEBUG nova.compute.manager [req-69f8ad5b-9d98-4524-94b7-9f7695feff02 req-ee7645b6-b390-4113-9c74-9f355ee98752 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Received event network-changed-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 928.130051] env[61998]: DEBUG nova.compute.manager [req-69f8ad5b-9d98-4524-94b7-9f7695feff02 req-ee7645b6-b390-4113-9c74-9f355ee98752 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Refreshing instance network info cache due to event network-changed-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 928.130339] env[61998]: DEBUG oslo_concurrency.lockutils [req-69f8ad5b-9d98-4524-94b7-9f7695feff02 req-ee7645b6-b390-4113-9c74-9f355ee98752 service nova] Acquiring lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.130395] env[61998]: DEBUG oslo_concurrency.lockutils [req-69f8ad5b-9d98-4524-94b7-9f7695feff02 req-ee7645b6-b390-4113-9c74-9f355ee98752 service nova] Acquired lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.130555] env[61998]: DEBUG nova.network.neutron [req-69f8ad5b-9d98-4524-94b7-9f7695feff02 req-ee7645b6-b390-4113-9c74-9f355ee98752 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Refreshing network info cache for port da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 928.534456] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388778, 'name': CreateVM_Task} progress is 99%.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.543939] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "58626303-4d70-48bb-9aaf-1b54cef92a76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.544192] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "58626303-4d70-48bb-9aaf-1b54cef92a76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.591899] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d7ac51b-ea72-4f91-a800-be84324e521c tempest-ServersTestManualDisk-2046091098 tempest-ServersTestManualDisk-2046091098-project-member] Lock "4ca7de74-3bcb-4da0-a2e1-573584467cc9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.069s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.631404] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0bab92a3-0781-425e-9de4-2d520cea0966 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "93df4e9a-29d2-4551-9bda-58b02163c116" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.698349] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5106e7-39ef-439f-9a50-ca8f57196041 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.708013] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5f1505-5585-476d-bc59-10dd7d945117 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.739683] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3447e0e1-3396-427b-98bc-96239e4be282 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.747473] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc87b94-e40f-4e80-b6e6-9e7eec22d4b4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.762667] env[61998]: DEBUG nova.compute.provider_tree [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.872967] env[61998]: DEBUG nova.network.neutron [req-69f8ad5b-9d98-4524-94b7-9f7695feff02 
req-ee7645b6-b390-4113-9c74-9f355ee98752 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updated VIF entry in instance network info cache for port da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 928.873360] env[61998]: DEBUG nova.network.neutron [req-69f8ad5b-9d98-4524-94b7-9f7695feff02 req-ee7645b6-b390-4113-9c74-9f355ee98752 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updating instance_info_cache with network_info: [{"id": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "address": "fa:16:3e:95:c1:87", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2ff8f0-d7", "ovs_interfaceid": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.034648] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388778, 'name': CreateVM_Task, 'duration_secs': 1.345989} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.034822] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 929.035531] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.035734] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.036191] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 929.036423] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a7c07d5-0c8f-4ea7-a1d3-923d4c0b78f2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.040710] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 929.040710] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52f09646-c82b-d61c-63dc-4d045037da8a" [ 929.040710] env[61998]: _type = "Task" [ 929.040710] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.048092] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "93df4e9a-29d2-4551-9bda-58b02163c116" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.048350] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "93df4e9a-29d2-4551-9bda-58b02163c116" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.048588] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "93df4e9a-29d2-4551-9bda-58b02163c116-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.048806] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "93df4e9a-29d2-4551-9bda-58b02163c116-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.048984] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "93df4e9a-29d2-4551-9bda-58b02163c116-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.050660] env[61998]: DEBUG nova.compute.manager [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 929.053159] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52f09646-c82b-d61c-63dc-4d045037da8a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.053723] env[61998]: INFO nova.compute.manager [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Terminating instance [ 929.057898] env[61998]: DEBUG nova.compute.manager [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 929.058250] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 929.059051] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34acc2e7-a01e-49e2-b170-f8230b1db0f4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.066107] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 929.066351] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-059e10f1-b204-403f-81b8-7075c63c9473 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.252584] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 929.252814] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 929.253009] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleting the datastore file [datastore1] 93df4e9a-29d2-4551-9bda-58b02163c116 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 929.253291] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84c4807f-8370-4fc9-8f60-e197cdbbd11c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.259411] env[61998]: DEBUG oslo_vmware.api [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] 
Waiting for the task: (returnval){ [ 929.259411] env[61998]: value = "task-1388782" [ 929.259411] env[61998]: _type = "Task" [ 929.259411] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.267903] env[61998]: DEBUG nova.scheduler.client.report [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 929.271016] env[61998]: DEBUG oslo_vmware.api [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388782, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.375707] env[61998]: DEBUG oslo_concurrency.lockutils [req-69f8ad5b-9d98-4524-94b7-9f7695feff02 req-ee7645b6-b390-4113-9c74-9f355ee98752 service nova] Releasing lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.553210] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.553493] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Processing image 45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 929.553757] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.553947] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.554133] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9b529c-f12b-475f-b975-e76603b0c51e 
tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 929.556690] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3237c22e-f0d1-4630-bb09-5b413bc57c96 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.568039] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 929.568306] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 929.569249] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-711105ca-28c7-48b3-8a5b-23c389f12d8a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.575427] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 929.575427] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5213e9ae-50d0-4706-fde1-2ffa605d92c7" [ 929.575427] env[61998]: _type = "Task" [ 929.575427] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.582476] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5213e9ae-50d0-4706-fde1-2ffa605d92c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.583421] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.769829] env[61998]: DEBUG oslo_vmware.api [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388782, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141299} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.770173] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 929.770395] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 929.770579] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 929.770760] env[61998]: INFO nova.compute.manager [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Took 0.71 seconds to destroy the instance on the hypervisor. [ 929.771016] env[61998]: DEBUG oslo.service.loopingcall [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 929.771255] env[61998]: DEBUG nova.compute.manager [-] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 929.771354] env[61998]: DEBUG nova.network.neutron [-] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 929.773421] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.704s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.775391] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.585s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.776885] env[61998]: INFO nova.compute.claims [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.795292] env[61998]: INFO nova.scheduler.client.report [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Deleted allocations for instance aaee1558-f98b-4006-93b6-69434c78e79c [ 930.091840] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Preparing fetch location {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 930.092139] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Fetch image to [datastore1] OSTACK_IMG_1911438d-9a7a-4bb5-bd91-356ef288ec6e/OSTACK_IMG_1911438d-9a7a-4bb5-bd91-356ef288ec6e.vmdk {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 930.092335] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Downloading stream optimized image 45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af to [datastore1] OSTACK_IMG_1911438d-9a7a-4bb5-bd91-356ef288ec6e/OSTACK_IMG_1911438d-9a7a-4bb5-bd91-356ef288ec6e.vmdk on the data store datastore1 as vApp {{(pid=61998) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 930.092509] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-9c9b529c-f12b-475f-b975-e76603b0c51e 
tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Downloading image file data 45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af to the ESX as VM named 'OSTACK_IMG_1911438d-9a7a-4bb5-bd91-356ef288ec6e' {{(pid=61998) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 930.155717] env[61998]: DEBUG nova.compute.manager [req-ff43e45d-73b1-4a46-b1d7-dbbe3b3ff80d req-97e5012d-a957-47d6-b1c9-14a3ad647ccc service nova] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Received event network-vif-deleted-c38fbbcf-cd65-4aed-a882-c4536afc9be3 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 930.156062] env[61998]: INFO nova.compute.manager [req-ff43e45d-73b1-4a46-b1d7-dbbe3b3ff80d req-97e5012d-a957-47d6-b1c9-14a3ad647ccc service nova] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Neutron deleted interface c38fbbcf-cd65-4aed-a882-c4536afc9be3; detaching it from the instance and deleting it from the info cache [ 930.156197] env[61998]: DEBUG nova.network.neutron [req-ff43e45d-73b1-4a46-b1d7-dbbe3b3ff80d req-97e5012d-a957-47d6-b1c9-14a3ad647ccc service nova] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.175624] env[61998]: DEBUG oslo_vmware.rw_handles [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 930.175624] env[61998]: value = "resgroup-9" [ 930.175624] env[61998]: _type = "ResourcePool" [ 930.175624] env[61998]: }. {{(pid=61998) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 930.176777] env[61998]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b5a4eb81-451f-4ae2-ac90-ef1a6688511f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.202688] env[61998]: DEBUG oslo_vmware.rw_handles [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lease: (returnval){ [ 930.202688] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52626797-c875-d8fc-d189-9a7af53fdb2a" [ 930.202688] env[61998]: _type = "HttpNfcLease" [ 930.202688] env[61998]: } obtained for vApp import into resource pool (val){ [ 930.202688] env[61998]: value = "resgroup-9" [ 930.202688] env[61998]: _type = "ResourcePool" [ 930.202688] env[61998]: }. {{(pid=61998) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 930.203225] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the lease: (returnval){ [ 930.203225] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52626797-c875-d8fc-d189-9a7af53fdb2a" [ 930.203225] env[61998]: _type = "HttpNfcLease" [ 930.203225] env[61998]: } to be ready. 
{{(pid=61998) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 930.209907] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 930.209907] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52626797-c875-d8fc-d189-9a7af53fdb2a" [ 930.209907] env[61998]: _type = "HttpNfcLease" [ 930.209907] env[61998]: } is initializing. {{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 930.306054] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0600b25f-606b-4348-be8d-178fcc5f153c tempest-ServerShowV254Test-1392423796 tempest-ServerShowV254Test-1392423796-project-member] Lock "aaee1558-f98b-4006-93b6-69434c78e79c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.272s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.536480] env[61998]: DEBUG nova.network.neutron [-] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.659325] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d88d282-9642-4aaa-a18b-bd797046ccce {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.668429] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad83dba-7f16-4600-9cc6-200971e282a9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.693673] env[61998]: DEBUG nova.compute.manager [req-ff43e45d-73b1-4a46-b1d7-dbbe3b3ff80d req-97e5012d-a957-47d6-b1c9-14a3ad647ccc service nova] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Detach interface failed, port_id=c38fbbcf-cd65-4aed-a882-c4536afc9be3, reason: Instance 93df4e9a-29d2-4551-9bda-58b02163c116 could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 930.712310] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 930.712310] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52626797-c875-d8fc-d189-9a7af53fdb2a" [ 930.712310] env[61998]: _type = "HttpNfcLease" [ 930.712310] env[61998]: } is initializing. 
{{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 930.904067] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d917c00b-3da0-48c6-a924-c84ae100a97f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.912123] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837ea3b4-99f1-4959-93c8-08505f54870c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.944378] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d90673-63cd-495a-9cc3-aaae3d41dfb7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.952773] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78379a0-a92f-40db-b817-d035aec01f5d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.967616] env[61998]: DEBUG nova.compute.provider_tree [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.038994] env[61998]: INFO nova.compute.manager [-] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Took 1.27 seconds to deallocate network for instance. [ 931.212813] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 931.212813] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52626797-c875-d8fc-d189-9a7af53fdb2a" [ 931.212813] env[61998]: _type = "HttpNfcLease" [ 931.212813] env[61998]: } is initializing. {{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 931.457937] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Volume attach. 
Driver type: vmdk {{(pid=61998) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 931.458215] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294765', 'volume_id': '740a704c-fcec-455a-84c5-cd429ec2a7be', 'name': 'volume-740a704c-fcec-455a-84c5-cd429ec2a7be', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e', 'attached_at': '', 'detached_at': '', 'volume_id': '740a704c-fcec-455a-84c5-cd429ec2a7be', 'serial': '740a704c-fcec-455a-84c5-cd429ec2a7be'} {{(pid=61998) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 931.459120] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0273c9-2c4b-4b15-8fea-3a50dad9f5b0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.479193] env[61998]: DEBUG nova.scheduler.client.report [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 931.487032] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66117f1-03e5-4280-8913-fb19061b718f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.512204] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] volume-740a704c-fcec-455a-84c5-cd429ec2a7be/volume-740a704c-fcec-455a-84c5-cd429ec2a7be.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 931.513173] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-194675d2-468c-4aa7-8490-f3e6941ab5da {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.531368] env[61998]: DEBUG oslo_vmware.api [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 931.531368] env[61998]: value = "task-1388784" [ 931.531368] env[61998]: _type = "Task" [ 931.531368] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.539497] env[61998]: DEBUG oslo_vmware.api [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388784, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.547755] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.715156] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 931.715156] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52626797-c875-d8fc-d189-9a7af53fdb2a" [ 931.715156] env[61998]: _type = "HttpNfcLease" [ 931.715156] env[61998]: } is initializing. {{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 931.988018] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.212s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.988747] env[61998]: DEBUG nova.compute.manager [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 931.991482] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.080s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.993285] env[61998]: INFO nova.compute.claims [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 932.041998] env[61998]: DEBUG oslo_vmware.api [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388784, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.213879] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 932.213879] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52626797-c875-d8fc-d189-9a7af53fdb2a" [ 932.213879] env[61998]: _type = "HttpNfcLease" [ 932.213879] env[61998]: } is ready. 
{{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 932.214229] env[61998]: DEBUG oslo_vmware.rw_handles [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 932.214229] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52626797-c875-d8fc-d189-9a7af53fdb2a" [ 932.214229] env[61998]: _type = "HttpNfcLease" [ 932.214229] env[61998]: }. {{(pid=61998) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 932.215075] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356a9165-d5a5-4aaa-9791-ef387ac94c7e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.222486] env[61998]: DEBUG oslo_vmware.rw_handles [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5275f9c4-5fbd-d1ad-712e-8f248a9bfb61/disk-0.vmdk from lease info. {{(pid=61998) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 932.222686] env[61998]: DEBUG oslo_vmware.rw_handles [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5275f9c4-5fbd-d1ad-712e-8f248a9bfb61/disk-0.vmdk. {{(pid=61998) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 932.286555] env[61998]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-89a9ccc2-319d-480b-a5f7-11967c7917c8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.497703] env[61998]: DEBUG nova.compute.utils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 932.504531] env[61998]: DEBUG nova.compute.manager [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 932.504722] env[61998]: DEBUG nova.network.neutron [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 932.546325] env[61998]: DEBUG oslo_vmware.api [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388784, 'name': ReconfigVM_Task, 'duration_secs': 0.586884} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.547684] env[61998]: DEBUG nova.policy [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09cdb0d0d4e847b8b6fd8b311b406c35', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5da5e404398849cd8d6ee4a8c99cd660', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 932.552993] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Reconfigured VM instance instance-0000004c to attach disk [datastore1] volume-740a704c-fcec-455a-84c5-cd429ec2a7be/volume-740a704c-fcec-455a-84c5-cd429ec2a7be.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.561023] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8b3d9cd-f5b6-4fb6-b67c-b8b91b03478d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.580305] env[61998]: DEBUG oslo_vmware.api [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 932.580305] env[61998]: value = "task-1388785" [ 932.580305] env[61998]: _type = "Task" [ 932.580305] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.589708] env[61998]: DEBUG oslo_vmware.api [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388785, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.830639] env[61998]: DEBUG nova.network.neutron [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Successfully created port: 420f3548-5f01-4335-970c-a39ffe789c13 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 933.002639] env[61998]: DEBUG nova.compute.manager [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 933.091219] env[61998]: DEBUG oslo_vmware.api [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388785, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.149444] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5eb0828-2cd6-447a-b127-0ff75ba7b7a4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.159122] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b148327d-8964-4af8-92f9-dd8e5f0b3d02 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.197782] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11c7f60-af1f-4234-b9e8-76f501ad7e1d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.207336] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a72bebe-00f0-4d35-a3d8-4cc35a0825f0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.224777] env[61998]: DEBUG nova.compute.provider_tree [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.406500] env[61998]: DEBUG oslo_vmware.rw_handles [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Completed reading data from the image iterator. {{(pid=61998) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 933.406827] env[61998]: DEBUG oslo_vmware.rw_handles [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5275f9c4-5fbd-d1ad-712e-8f248a9bfb61/disk-0.vmdk. {{(pid=61998) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 933.407818] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e457619-2c49-4d01-b34b-b8cddc838f77 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.415122] env[61998]: DEBUG oslo_vmware.rw_handles [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5275f9c4-5fbd-d1ad-712e-8f248a9bfb61/disk-0.vmdk is in state: ready. {{(pid=61998) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 933.415330] env[61998]: DEBUG oslo_vmware.rw_handles [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5275f9c4-5fbd-d1ad-712e-8f248a9bfb61/disk-0.vmdk. 
{{(pid=61998) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 933.415564] env[61998]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-cdd6aba3-8fa4-46cf-800a-fb282d42b4ed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.591709] env[61998]: DEBUG oslo_vmware.api [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388785, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.604161] env[61998]: DEBUG oslo_vmware.rw_handles [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5275f9c4-5fbd-d1ad-712e-8f248a9bfb61/disk-0.vmdk. {{(pid=61998) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 933.604434] env[61998]: INFO nova.virt.vmwareapi.images [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Downloaded image file data 45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af [ 933.605297] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17cba83-3763-44d9-bb66-c9b5324b9345 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.620511] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3e88dad-8707-4294-9a02-c034a63d3e2c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.641932] env[61998]: INFO nova.virt.vmwareapi.images [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] The imported VM was unregistered [ 933.644267] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Caching image {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 933.644504] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating directory with path [datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.644766] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b643cfd7-9f00-4308-a96b-e0ae64ad1aee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.655744] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] 
Created directory with path [datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.655926] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_1911438d-9a7a-4bb5-bd91-356ef288ec6e/OSTACK_IMG_1911438d-9a7a-4bb5-bd91-356ef288ec6e.vmdk to [datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af.vmdk. {{(pid=61998) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 933.656189] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-24490c26-56f3-4f75-8b84-338ef2c9fbb4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.662371] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 933.662371] env[61998]: value = "task-1388787" [ 933.662371] env[61998]: _type = "Task" [ 933.662371] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.669939] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388787, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.730962] env[61998]: DEBUG nova.scheduler.client.report [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 934.012634] env[61998]: DEBUG nova.compute.manager [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 934.037585] env[61998]: DEBUG nova.virt.hardware [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 934.037859] env[61998]: DEBUG nova.virt.hardware [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 934.038040] env[61998]: DEBUG nova.virt.hardware [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.038237] env[61998]: DEBUG nova.virt.hardware [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 934.038391] env[61998]: DEBUG nova.virt.hardware [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.038580] env[61998]: DEBUG nova.virt.hardware [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 934.038893] env[61998]: DEBUG nova.virt.hardware [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 934.039151] env[61998]: DEBUG nova.virt.hardware [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 934.039461] env[61998]: DEBUG nova.virt.hardware [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 934.039536] env[61998]: DEBUG nova.virt.hardware [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 934.039695] env[61998]: DEBUG nova.virt.hardware [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 934.040612] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792909b0-15f9-4db5-bdb9-4f800f5e3d33 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.050947] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08fc707-5ef8-46fc-bf66-1aa37aa5a17d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.091077] env[61998]: DEBUG oslo_vmware.api [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388785, 'name': ReconfigVM_Task, 'duration_secs': 1.131133} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.091413] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294765', 'volume_id': '740a704c-fcec-455a-84c5-cd429ec2a7be', 'name': 'volume-740a704c-fcec-455a-84c5-cd429ec2a7be', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e', 'attached_at': '', 'detached_at': '', 'volume_id': '740a704c-fcec-455a-84c5-cd429ec2a7be', 'serial': '740a704c-fcec-455a-84c5-cd429ec2a7be'} {{(pid=61998) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 934.175440] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388787, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.236196] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.236794] env[61998]: DEBUG nova.compute.manager [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 934.239634] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 8.040s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.369619] env[61998]: DEBUG nova.compute.manager [req-4b40e53c-9f88-4f1a-b4b1-dd3b5e96dc68 req-3707c2f5-1684-4701-8b81-1994fc909e6b service nova] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Received event network-vif-plugged-420f3548-5f01-4335-970c-a39ffe789c13 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 934.369869] env[61998]: DEBUG oslo_concurrency.lockutils [req-4b40e53c-9f88-4f1a-b4b1-dd3b5e96dc68 req-3707c2f5-1684-4701-8b81-1994fc909e6b service nova] Acquiring lock "34143cac-64e9-41fd-a970-b593d1472d92-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.370123] env[61998]: DEBUG oslo_concurrency.lockutils [req-4b40e53c-9f88-4f1a-b4b1-dd3b5e96dc68 req-3707c2f5-1684-4701-8b81-1994fc909e6b service nova] Lock "34143cac-64e9-41fd-a970-b593d1472d92-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.370315] env[61998]: DEBUG oslo_concurrency.lockutils [req-4b40e53c-9f88-4f1a-b4b1-dd3b5e96dc68 req-3707c2f5-1684-4701-8b81-1994fc909e6b service nova] Lock "34143cac-64e9-41fd-a970-b593d1472d92-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.370492] env[61998]: DEBUG nova.compute.manager [req-4b40e53c-9f88-4f1a-b4b1-dd3b5e96dc68 req-3707c2f5-1684-4701-8b81-1994fc909e6b service nova] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] No waiting events found dispatching network-vif-plugged-420f3548-5f01-4335-970c-a39ffe789c13 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 934.370663] env[61998]: WARNING nova.compute.manager [req-4b40e53c-9f88-4f1a-b4b1-dd3b5e96dc68 req-3707c2f5-1684-4701-8b81-1994fc909e6b service nova] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Received unexpected event network-vif-plugged-420f3548-5f01-4335-970c-a39ffe789c13 for instance with 
vm_state building and task_state spawning. [ 934.473101] env[61998]: DEBUG nova.network.neutron [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Successfully updated port: 420f3548-5f01-4335-970c-a39ffe789c13 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 934.675659] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388787, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.744570] env[61998]: DEBUG nova.compute.utils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 934.748967] env[61998]: INFO nova.compute.claims [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 934.753869] env[61998]: DEBUG nova.compute.manager [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 934.754870] env[61998]: DEBUG nova.network.neutron [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 934.795499] env[61998]: DEBUG nova.policy [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '744da696f7c64f62ae04195aa737fab4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c75c9b7c8d6b441d80fe512c37c88679', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 934.976439] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Acquiring lock "refresh_cache-34143cac-64e9-41fd-a970-b593d1472d92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.976614] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Acquired lock "refresh_cache-34143cac-64e9-41fd-a970-b593d1472d92" {{(pid=61998) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.976746] env[61998]: DEBUG nova.network.neutron [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 935.072724] env[61998]: DEBUG nova.network.neutron [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Successfully created port: 67af4e0e-6ff7-417c-8f5f-9783e9786ff3 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 935.134393] env[61998]: DEBUG nova.objects.instance [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lazy-loading 'flavor' on Instance uuid 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.177440] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388787, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.254775] env[61998]: DEBUG nova.compute.manager [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 935.259012] env[61998]: INFO nova.compute.resource_tracker [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating resource usage from migration 3ba573ea-8f31-4ccb-921a-24186b139c2d [ 935.393346] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f250a62-7844-4840-b804-6cf8b15c181e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.403184] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef3e4bb-cad9-4243-bff5-9d4a67d0a83a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.438938] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5bdb21-125d-4e90-9017-f6556f41f2e8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.449247] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e762c508-b7f2-49e1-a60c-9dda4b11ff14 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.465607] env[61998]: DEBUG nova.compute.provider_tree [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.523933] env[61998]: DEBUG nova.network.neutron [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 935.643120] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0a4757ce-3c3d-41d9-9e64-7cbf7fab3e6c tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.814s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.678044] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388787, 'name': MoveVirtualDisk_Task} progress is 88%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.688319] env[61998]: DEBUG nova.network.neutron [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Updating instance_info_cache with network_info: [{"id": "420f3548-5f01-4335-970c-a39ffe789c13", "address": "fa:16:3e:ec:b3:3d", "network": {"id": "4fb698c8-1174-49ac-acc7-2b7e6c71b6b2", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-964943749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5da5e404398849cd8d6ee4a8c99cd660", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap420f3548-5f", "ovs_interfaceid": "420f3548-5f01-4335-970c-a39ffe789c13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.969727] env[61998]: DEBUG nova.scheduler.client.report [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 936.176092] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388787, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.281104} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.176433] env[61998]: INFO nova.virt.vmwareapi.ds_util [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_1911438d-9a7a-4bb5-bd91-356ef288ec6e/OSTACK_IMG_1911438d-9a7a-4bb5-bd91-356ef288ec6e.vmdk to [datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af.vmdk. 
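The records above illustrate oslo.vmware's long-running task pattern: every vCenter call that returns a Task object (ReconfigVM_Task, MoveVirtualDisk_Task, CreateVM_Task, DeleteDatastoreFile_Task) is handed to wait_for_task (oslo_vmware/api.py:397), which polls it through _poll_task until vCenter reports a terminal state, then logs completion with 'duration_secs'. The following is only a minimal sketch of that loop, not oslo.vmware's implementation: `session.get_task_info()` is a hypothetical stand-in for the real suds/PropertyCollector round trip, and the fixed sleep stands in for the looping call oslo.vmware actually schedules.

```python
import time

POLL_INTERVAL = 0.5  # seconds between polls; a stand-in for the looping-call timer


def wait_for_task(session, task):
    """Poll a vCenter task until it leaves its running states.

    `session.get_task_info(task)` is assumed (not a real oslo.vmware call)
    to return an object with `state`, `progress` and `error` attributes.
    """
    while True:
        info = session.get_task_info(task)
        if info.state == "success":
            return info  # caller reads e.g. info.result
        if info.state == "error":
            raise RuntimeError(info.error)
        # 'queued' or 'running': report progress and retry, matching the
        # "progress is N%" records in the stream above.
        print("Task %s progress is %s%%" % (task, info.progress or 0))
        time.sleep(POLL_INTERVAL)
```

In the stream this pattern appears as repeated "Task: {...} progress is N%." records (api.py:434) terminated by a "completed successfully" record carrying 'duration_secs' (api.py:444).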
[ 936.176597] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Cleaning up location [datastore1] OSTACK_IMG_1911438d-9a7a-4bb5-bd91-356ef288ec6e {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 936.176767] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_1911438d-9a7a-4bb5-bd91-356ef288ec6e {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.177091] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26b10f89-ba6e-41b1-93b9-bbbefaec00b9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.183400] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 936.183400] env[61998]: value = "task-1388788" [ 936.183400] env[61998]: _type = "Task" [ 936.183400] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.191551] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Releasing lock "refresh_cache-34143cac-64e9-41fd-a970-b593d1472d92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.191843] env[61998]: DEBUG nova.compute.manager [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Instance network_info: |[{"id": "420f3548-5f01-4335-970c-a39ffe789c13", "address": "fa:16:3e:ec:b3:3d", "network": {"id": "4fb698c8-1174-49ac-acc7-2b7e6c71b6b2", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-964943749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5da5e404398849cd8d6ee4a8c99cd660", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap420f3548-5f", "ovs_interfaceid": "420f3548-5f01-4335-970c-a39ffe789c13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 936.192118] env[61998]: DEBUG oslo_vmware.api [None 
req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.192467] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:b3:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4734e5e-2a76-4bda-8905-70c9bf9e007f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '420f3548-5f01-4335-970c-a39ffe789c13', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 936.199986] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Creating folder: Project (5da5e404398849cd8d6ee4a8c99cd660). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 936.200232] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba5645cb-7b14-41c3-b8c7-ecae4a50f996 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.209546] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Created folder: Project (5da5e404398849cd8d6ee4a8c99cd660) in parent group-v294665. [ 936.209729] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Creating folder: Instances. Parent ref: group-v294768. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 936.209938] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a859208b-a03b-4280-8f93-5c19d377aa21 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.218177] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Created folder: Instances in parent group-v294768. [ 936.218420] env[61998]: DEBUG oslo.service.loopingcall [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.218611] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 936.218790] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e239d99-c232-4f60-b576-5728d7c85b0f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.237823] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 936.237823] env[61998]: value = "task-1388791" [ 936.237823] env[61998]: _type = "Task" [ 936.237823] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.244808] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388791, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.267774] env[61998]: DEBUG nova.compute.manager [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 936.293620] env[61998]: DEBUG nova.virt.hardware [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 936.293915] env[61998]: DEBUG nova.virt.hardware [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 936.294185] env[61998]: DEBUG nova.virt.hardware [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.294363] env[61998]: DEBUG nova.virt.hardware [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 936.294581] env[61998]: DEBUG nova.virt.hardware [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 
tempest-ServersTestJSON-729592905-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.294782] env[61998]: DEBUG nova.virt.hardware [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 936.295027] env[61998]: DEBUG nova.virt.hardware [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 936.295192] env[61998]: DEBUG nova.virt.hardware [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 936.295367] env[61998]: DEBUG nova.virt.hardware [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 936.295564] env[61998]: DEBUG nova.virt.hardware [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 936.295863] env[61998]: DEBUG nova.virt.hardware [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 936.296727] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765c1a41-d2fd-4eef-9dba-a6fdfc56b7f4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.304555] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ec4129-ce64-49d8-9d1d-c03d72a01b4c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.395541] env[61998]: DEBUG nova.compute.manager [req-bfb1e351-1294-438c-8274-11f62f89b0c8 req-4acf788e-f930-41cc-ac4b-2b6b94535208 service nova] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Received event network-changed-420f3548-5f01-4335-970c-a39ffe789c13 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 936.395705] env[61998]: DEBUG nova.compute.manager [req-bfb1e351-1294-438c-8274-11f62f89b0c8 req-4acf788e-f930-41cc-ac4b-2b6b94535208 service nova] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Refreshing instance network info cache due to event network-changed-420f3548-5f01-4335-970c-a39ffe789c13. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 936.395930] env[61998]: DEBUG oslo_concurrency.lockutils [req-bfb1e351-1294-438c-8274-11f62f89b0c8 req-4acf788e-f930-41cc-ac4b-2b6b94535208 service nova] Acquiring lock "refresh_cache-34143cac-64e9-41fd-a970-b593d1472d92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.396093] env[61998]: DEBUG oslo_concurrency.lockutils [req-bfb1e351-1294-438c-8274-11f62f89b0c8 req-4acf788e-f930-41cc-ac4b-2b6b94535208 service nova] Acquired lock "refresh_cache-34143cac-64e9-41fd-a970-b593d1472d92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.396390] env[61998]: DEBUG nova.network.neutron [req-bfb1e351-1294-438c-8274-11f62f89b0c8 req-4acf788e-f930-41cc-ac4b-2b6b94535208 service nova] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Refreshing network info cache for port 420f3548-5f01-4335-970c-a39ffe789c13 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 936.475173] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.236s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.475407] env[61998]: INFO nova.compute.manager [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Migrating [ 936.475679] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.475815] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "compute-rpcapi-router" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.477123] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.894s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.479565] env[61998]: INFO nova.compute.claims [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 936.482504] env[61998]: INFO nova.compute.rpcapi [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 936.482802] env[61998]: 
DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "compute-rpcapi-router" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.500103] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.500103] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.693758] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388788, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.046121} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.694046] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.694407] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.694711] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af.vmdk to [datastore1] 2d0b199f-e0f1-42e0-afb5-e08602aebf01/2d0b199f-e0f1-42e0-afb5-e08602aebf01.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 936.695076] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-612d416b-24c0-4d00-b529-d4fe0c88861d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.701393] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 936.701393] env[61998]: value = "task-1388792" [ 936.701393] env[61998]: _type = "Task" [ 936.701393] env[61998]: 
} to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.708913] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388792, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.712225] env[61998]: DEBUG nova.network.neutron [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Successfully updated port: 67af4e0e-6ff7-417c-8f5f-9783e9786ff3 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 936.748517] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388791, 'name': CreateVM_Task, 'duration_secs': 0.350377} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.748714] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 936.749776] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.749956] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.750575] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 936.750848] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ea73821-cc13-4229-ae9a-1978d16ee811 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.755390] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Waiting for the task: (returnval){ [ 936.755390] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5233ddba-9f58-dd1f-1f38-20c1b7019f5b" [ 936.755390] env[61998]: _type = "Task" [ 936.755390] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.767283] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5233ddba-9f58-dd1f-1f38-20c1b7019f5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.002539] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.002804] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.002925] env[61998]: DEBUG nova.network.neutron [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.005265] env[61998]: INFO nova.compute.manager [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Detaching volume 427f7ffb-87ef-476e-b045-24bddd236eff [ 937.047992] env[61998]: INFO nova.virt.block_device [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Attempting to driver detach volume 427f7ffb-87ef-476e-b045-24bddd236eff from mountpoint /dev/sdb [ 937.048267] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Volume detach. 
Driver type: vmdk {{(pid=61998) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 937.048553] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294762', 'volume_id': '427f7ffb-87ef-476e-b045-24bddd236eff', 'name': 'volume-427f7ffb-87ef-476e-b045-24bddd236eff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e', 'attached_at': '', 'detached_at': '', 'volume_id': '427f7ffb-87ef-476e-b045-24bddd236eff', 'serial': '427f7ffb-87ef-476e-b045-24bddd236eff'} {{(pid=61998) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 937.049945] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873a6a20-9295-4d47-8e60-892ddc343ce3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.077210] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144d098a-8603-460c-810b-6083924194d2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.087088] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03cc4456-bc1f-49f4-8d4e-489a9fa46130 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.113550] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5620de9b-3b98-46b2-bf19-49bf71cb9b80 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.132063] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] The volume has not been displaced from its original location: [datastore1] volume-427f7ffb-87ef-476e-b045-24bddd236eff/volume-427f7ffb-87ef-476e-b045-24bddd236eff.vmdk. No consolidation needed. 
{{(pid=61998) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 937.137994] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Reconfiguring VM instance instance-0000004c to detach disk 2001 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 937.138415] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6b86995-7319-4f3f-9c10-f327b7e6f439 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.158839] env[61998]: DEBUG oslo_vmware.api [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 937.158839] env[61998]: value = "task-1388793" [ 937.158839] env[61998]: _type = "Task" [ 937.158839] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.169800] env[61998]: DEBUG oslo_vmware.api [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388793, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.214647] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "refresh_cache-a67aa33f-c7ba-44da-bdfa-e0a53a8538ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.215130] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "refresh_cache-a67aa33f-c7ba-44da-bdfa-e0a53a8538ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.215130] env[61998]: DEBUG nova.network.neutron [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.218052] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388792, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.269111] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5233ddba-9f58-dd1f-1f38-20c1b7019f5b, 'name': SearchDatastore_Task, 'duration_secs': 0.015765} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.269111] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.269333] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 937.269462] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.269620] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.269806] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 937.270103] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6844cdbb-5179-415a-8a00-7ef35b2a30fe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.287562] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 937.287768] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 937.288562] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ea0da95-d890-4577-96f7-e6dd8158a070 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.298359] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Waiting for the task: (returnval){ [ 937.298359] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52da7ac7-2dc3-a0f0-e1ad-6c1627045fc9" [ 937.298359] env[61998]: _type = "Task" [ 937.298359] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.308907] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52da7ac7-2dc3-a0f0-e1ad-6c1627045fc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.332889] env[61998]: DEBUG nova.network.neutron [req-bfb1e351-1294-438c-8274-11f62f89b0c8 req-4acf788e-f930-41cc-ac4b-2b6b94535208 service nova] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Updated VIF entry in instance network info cache for port 420f3548-5f01-4335-970c-a39ffe789c13. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 937.333286] env[61998]: DEBUG nova.network.neutron [req-bfb1e351-1294-438c-8274-11f62f89b0c8 req-4acf788e-f930-41cc-ac4b-2b6b94535208 service nova] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Updating instance_info_cache with network_info: [{"id": "420f3548-5f01-4335-970c-a39ffe789c13", "address": "fa:16:3e:ec:b3:3d", "network": {"id": "4fb698c8-1174-49ac-acc7-2b7e6c71b6b2", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-964943749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5da5e404398849cd8d6ee4a8c99cd660", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap420f3548-5f", "ovs_interfaceid": "420f3548-5f01-4335-970c-a39ffe789c13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.618370] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9119e526-7ad0-485d-9256-0bc22f439f79 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.628250] 
env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a569345-fc66-4eb4-b365-bbd8af2f12d6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.671869] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397f7462-c6a8-4fea-9d5b-4f3e82a7bbde {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.683125] env[61998]: DEBUG oslo_vmware.api [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388793, 'name': ReconfigVM_Task, 'duration_secs': 0.346273} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.684444] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cec1278-d78e-4b71-ac69-4a4b54bc7597 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.688506] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Reconfigured VM instance instance-0000004c to detach disk 2001 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 937.693266] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4259f5cf-cb82-49b9-9101-1bb816f3eec3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.721609] env[61998]: DEBUG nova.compute.provider_tree [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.728974] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388792, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.728974] env[61998]: DEBUG oslo_vmware.api [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 937.728974] env[61998]: value = "task-1388794" [ 937.728974] env[61998]: _type = "Task" [ 937.728974] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.741302] env[61998]: DEBUG oslo_vmware.api [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388794, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.784779] env[61998]: DEBUG nova.network.neutron [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 937.813504] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52da7ac7-2dc3-a0f0-e1ad-6c1627045fc9, 'name': SearchDatastore_Task, 'duration_secs': 0.083829} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.814502] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85e2dd7d-4e87-4918-9db6-c813b304258e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.822675] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Waiting for the task: (returnval){ [ 937.822675] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]525c4866-c0a2-d02f-126b-6fc08392f247" [ 937.822675] env[61998]: _type = "Task" [ 937.822675] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.835323] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]525c4866-c0a2-d02f-126b-6fc08392f247, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.835873] env[61998]: DEBUG oslo_concurrency.lockutils [req-bfb1e351-1294-438c-8274-11f62f89b0c8 req-4acf788e-f930-41cc-ac4b-2b6b94535208 service nova] Releasing lock "refresh_cache-34143cac-64e9-41fd-a970-b593d1472d92" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.845227] env[61998]: DEBUG nova.network.neutron [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance_info_cache with network_info: [{"id": "9f5103b5-d9cc-4978-9140-901834a6af10", "address": "fa:16:3e:91:a2:96", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5103b5-d9", "ovs_interfaceid": "9f5103b5-d9cc-4978-9140-901834a6af10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.998463] env[61998]: DEBUG nova.network.neutron [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Updating instance_info_cache with network_info: [{"id": "67af4e0e-6ff7-417c-8f5f-9783e9786ff3", "address": "fa:16:3e:30:20:50", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67af4e0e-6f", "ovs_interfaceid": "67af4e0e-6ff7-417c-8f5f-9783e9786ff3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 938.216189] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388792, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.225495] env[61998]: DEBUG nova.scheduler.client.report [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 938.240450] env[61998]: DEBUG oslo_vmware.api [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388794, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.335544] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]525c4866-c0a2-d02f-126b-6fc08392f247, 'name': SearchDatastore_Task, 'duration_secs': 0.088423} completed successfully. 
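The inventory report above fixes the schedulable capacity of provider c8c34fc8-902a-460e-a93a-a1e887f55ddd: placement treats capacity as (total - reserved) * allocation_ratio per resource class, with max_unit capping any single allocation (16 VCPUs, 65530 MB, 175 GB here). A quick check of those numbers:

    # Capacity implied by the inventory logged above, using placement's
    # (total - reserved) * allocation_ratio rule.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)  # VCPU 192, MEMORY_MB 196078, DISK_GB 400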
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.335978] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.336283] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 34143cac-64e9-41fd-a970-b593d1472d92/34143cac-64e9-41fd-a970-b593d1472d92.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 938.336578] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f005acb-8dca-41ab-af46-9f0458bc5401 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.346686] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Waiting for the task: (returnval){ [ 938.346686] env[61998]: value = "task-1388795" [ 938.346686] env[61998]: _type = "Task" [ 938.346686] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.347293] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.359676] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388795, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.422698] env[61998]: DEBUG nova.compute.manager [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Received event network-vif-plugged-67af4e0e-6ff7-417c-8f5f-9783e9786ff3 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 938.422900] env[61998]: DEBUG oslo_concurrency.lockutils [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] Acquiring lock "a67aa33f-c7ba-44da-bdfa-e0a53a8538ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.423341] env[61998]: DEBUG oslo_concurrency.lockutils [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] Lock "a67aa33f-c7ba-44da-bdfa-e0a53a8538ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.423548] env[61998]: DEBUG oslo_concurrency.lockutils [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] Lock "a67aa33f-c7ba-44da-bdfa-e0a53a8538ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.423727] env[61998]: DEBUG nova.compute.manager [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] No waiting events found dispatching network-vif-plugged-67af4e0e-6ff7-417c-8f5f-9783e9786ff3 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 938.423946] env[61998]: WARNING nova.compute.manager [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Received unexpected event network-vif-plugged-67af4e0e-6ff7-417c-8f5f-9783e9786ff3 for instance with vm_state building and task_state spawning. [ 938.424146] env[61998]: DEBUG nova.compute.manager [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Received event network-changed-67af4e0e-6ff7-417c-8f5f-9783e9786ff3 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 938.424310] env[61998]: DEBUG nova.compute.manager [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Refreshing instance network info cache due to event network-changed-67af4e0e-6ff7-417c-8f5f-9783e9786ff3. 
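The "<instance-uuid>-events" lock acquired and released above serializes Neutron's external events against any spawn-side waiter; when network-vif-plugged arrives before a waiter registers, the manager logs the "Received unexpected event ... vm_state building" warning and carries on. A rough sketch of that pattern, assuming a plain dict as the pending-event table; these names are illustrative, not Nova's actual internals:

    # Illustrative sketch of per-instance event serialization. The real table
    # and waiter objects live in nova.compute.manager.InstanceEvents.
    from oslo_concurrency import lockutils

    pending_events = {}  # (instance_uuid, event_name) -> waiter

    def pop_instance_event(instance_uuid, event_name):
        with lockutils.lock(f'{instance_uuid}-events'):
            waiter = pending_events.pop((instance_uuid, event_name), None)
        if waiter is None:
            # The event arrived before anyone waited on it, as in the
            # WARNING above; it is simply dropped.
            print('No waiting events found dispatching', event_name)
        return waiter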
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 938.424485] env[61998]: DEBUG oslo_concurrency.lockutils [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] Acquiring lock "refresh_cache-a67aa33f-c7ba-44da-bdfa-e0a53a8538ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.501104] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "refresh_cache-a67aa33f-c7ba-44da-bdfa-e0a53a8538ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.501465] env[61998]: DEBUG nova.compute.manager [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Instance network_info: |[{"id": "67af4e0e-6ff7-417c-8f5f-9783e9786ff3", "address": "fa:16:3e:30:20:50", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67af4e0e-6f", "ovs_interfaceid": "67af4e0e-6ff7-417c-8f5f-9783e9786ff3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 938.501802] env[61998]: DEBUG oslo_concurrency.lockutils [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] Acquired lock "refresh_cache-a67aa33f-c7ba-44da-bdfa-e0a53a8538ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.501982] env[61998]: DEBUG nova.network.neutron [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Refreshing network info cache for port 67af4e0e-6ff7-417c-8f5f-9783e9786ff3 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 938.503393] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:20:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67af4e0e-6ff7-417c-8f5f-9783e9786ff3', 'vif_model': 'vmxnet3'}] {{(pid=61998) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 938.512398] env[61998]: DEBUG oslo.service.loopingcall [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 938.513610] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 938.513864] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-729d3c7e-19b5-489a-a442-09750ded34c3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.536686] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 938.536686] env[61998]: value = "task-1388796" [ 938.536686] env[61998]: _type = "Task" [ 938.536686] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.547821] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388796, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.718653] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388792, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.730781] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.253s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.731426] env[61998]: DEBUG nova.compute.manager [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Start building networks asynchronously for instance. 
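The network_info blobs cached above are plain lists of VIF dicts, and the VIF info handed to build_virtual_machine is derived from them (MAC address, NSX logical-switch id as an OpaqueNetwork ref, vmxnet3 model). A short sketch that walks the same structure to pull out each VIF's fixed IPs; the dict layout is copied from the entries above:

    # Extract (vif_id, address, cidr) triples from a network_info list with
    # the exact shape logged above.
    def fixed_ips(network_info):
        out = []
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    if ip['type'] == 'fixed':
                        out.append((vif['id'], ip['address'], subnet['cidr']))
        return out

    # For instance a67aa33f-... this yields
    # [('67af4e0e-...', '192.168.128.3', '192.168.128.0/28')].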
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 938.735795] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.188s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.736087] env[61998]: DEBUG nova.objects.instance [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lazy-loading 'resources' on Instance uuid 93df4e9a-29d2-4551-9bda-58b02163c116 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.749613] env[61998]: DEBUG oslo_vmware.api [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388794, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.860554] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388795, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.046972] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388796, 'name': CreateVM_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.215218] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388792, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.372569} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.216040] env[61998]: DEBUG nova.network.neutron [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Updated VIF entry in instance network info cache for port 67af4e0e-6ff7-417c-8f5f-9783e9786ff3. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 939.216407] env[61998]: DEBUG nova.network.neutron [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Updating instance_info_cache with network_info: [{"id": "67af4e0e-6ff7-417c-8f5f-9783e9786ff3", "address": "fa:16:3e:30:20:50", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67af4e0e-6f", "ovs_interfaceid": "67af4e0e-6ff7-417c-8f5f-9783e9786ff3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.217581] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af/45c4f6dd-25ec-4eb7-a0bf-b5cd3adef4af.vmdk to [datastore1] 2d0b199f-e0f1-42e0-afb5-e08602aebf01/2d0b199f-e0f1-42e0-afb5-e08602aebf01.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 939.218399] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c797f1-2934-4979-af15-8cf46cc08ab3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.240603] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 2d0b199f-e0f1-42e0-afb5-e08602aebf01/2d0b199f-e0f1-42e0-afb5-e08602aebf01.vmdk or device None with type streamOptimized {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 939.242465] env[61998]: DEBUG nova.compute.utils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 939.246204] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-392911c4-0fc5-40a7-af31-3088fe2359ab {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.263135] env[61998]: DEBUG 
nova.compute.manager [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 939.263135] env[61998]: DEBUG nova.network.neutron [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 939.274208] env[61998]: DEBUG oslo_vmware.api [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388794, 'name': ReconfigVM_Task, 'duration_secs': 1.150184} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.277693] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294762', 'volume_id': '427f7ffb-87ef-476e-b045-24bddd236eff', 'name': 'volume-427f7ffb-87ef-476e-b045-24bddd236eff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e', 'attached_at': '', 'detached_at': '', 'volume_id': '427f7ffb-87ef-476e-b045-24bddd236eff', 'serial': '427f7ffb-87ef-476e-b045-24bddd236eff'} {{(pid=61998) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 939.279768] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 939.279768] env[61998]: value = "task-1388797" [ 939.279768] env[61998]: _type = "Task" [ 939.279768] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.289570] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388797, 'name': ReconfigVM_Task} progress is 14%. 
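The "Reconfigured VM instance ... to detach disk 2001" entry above is just another ReconfigVM_Task whose spec carries a single deviceChange with operation 'remove'; no fileOperation is set, so the backing vmdk (the Cinder volume) survives the detach. A minimal sketch of building that spec with the suds client factory, assuming the device was already located in the VM's hardware list by its key:

    # Build the remove-device spec behind the detach above. client_factory is
    # session.vim.client.factory; device is the VirtualDisk with key 2001.
    def detach_disk_spec(client_factory, device):
        spec = client_factory.create('ns0:VirtualMachineConfigSpec')
        change = client_factory.create('ns0:VirtualDeviceConfigSpec')
        change.operation = 'remove'   # deliberately no fileOperation: keep the vmdk
        change.device = device
        spec.deviceChange = [change]
        return spec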
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.307997] env[61998]: DEBUG nova.policy [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3066202e35b643d1b6d3f2d8b4d724ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e479b6ac56f464fbc86574f776cd96c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 939.365976] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388795, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.381178] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff20259-abf3-4f92-b0e4-8cbf4fd27ef3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.390080] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ad426f-4c8d-41ce-851b-abfe1fc6af92 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.425575] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e821dc63-f411-4fed-a773-f68a6ff853e2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.434998] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83610cd6-6a44-490d-964f-718043cad3f9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.451238] env[61998]: DEBUG nova.compute.provider_tree [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.550216] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388796, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.685141] env[61998]: DEBUG nova.network.neutron [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Successfully created port: f4cf5059-51bc-4b7e-afa4-aab588228a6c {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 939.721799] env[61998]: DEBUG oslo_concurrency.lockutils [req-f5bc29d4-00eb-4e9e-8805-299e4564cac3 req-6bdd89b4-73d7-4375-a1c8-d8eee2b7a0f3 service nova] Releasing lock "refresh_cache-a67aa33f-c7ba-44da-bdfa-e0a53a8538ad" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.762741] env[61998]: DEBUG nova.compute.manager [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 939.791027] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388797, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.830518] env[61998]: DEBUG nova.objects.instance [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lazy-loading 'flavor' on Instance uuid 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 939.858622] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388795, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.298967} completed successfully. 
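The CopyVirtualDisk_Task that just completed is the cache-to-instance copy: [datastore1] devstack-image-cache_base/<image>/<image>.vmdk becomes [datastore1] <instance>/<instance>.vmdk. A sketch of issuing that call through oslo.vmware, assuming dc_ref is the datacenter managed-object reference (lookup omitted):

    # Sketch of the image-cache copy above; paths use the "[datastore] path"
    # notation from the log.
    def copy_cached_image(session, dc_ref, image_id, instance_uuid):
        src = f'[datastore1] devstack-image-cache_base/{image_id}/{image_id}.vmdk'
        dst = f'[datastore1] {instance_uuid}/{instance_uuid}.vmdk'
        vdm = session.vim.service_content.virtualDiskManager
        task_ref = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                      sourceName=src, sourceDatacenter=dc_ref,
                                      destName=dst)
        return session.wait_for_task(task_ref)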
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.859289] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 34143cac-64e9-41fd-a970-b593d1472d92/34143cac-64e9-41fd-a970-b593d1472d92.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 939.859289] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 939.859446] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c21f8162-db37-466c-acf2-d30ff29c89de {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.865622] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Waiting for the task: (returnval){ [ 939.865622] env[61998]: value = "task-1388798" [ 939.865622] env[61998]: _type = "Task" [ 939.865622] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.872667] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44cf1e17-c836-49dc-b46f-8cb1f38f0cb5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.878599] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388798, 'name': ExtendVirtualDisk_Task} progress is 0%. 
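The "Extending root virtual disk to 1048576" figure above is in KiB: ExtendVirtualDisk_Task takes newCapacityKb, and 1048576 KiB is exactly the 1 GiB root disk of a root_gb=1 flavor. The arithmetic:

    # ExtendVirtualDisk_Task sizes are KiB; a root_gb=1 flavor maps to the
    # 1048576 logged above.
    root_gb = 1
    new_capacity_kb = root_gb * 1024 * 1024
    assert new_capacity_kb == 1048576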
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.894758] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance '43ff4071-05f5-4e5c-a46d-1ca6c99809f0' progress to 0 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 939.953786] env[61998]: DEBUG nova.scheduler.client.report [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 940.047229] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388796, 'name': CreateVM_Task, 'duration_secs': 1.098828} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.047401] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 940.048083] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.048262] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.048593] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 940.048841] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72e33232-66d8-493b-b11f-b91f1b71e583 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.053572] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 940.053572] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]526ed5a4-76e8-330e-34f1-c2185f96c787" [ 940.053572] env[61998]: 
_type = "Task" [ 940.053572] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.060739] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526ed5a4-76e8-330e-34f1-c2185f96c787, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.291646] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388797, 'name': ReconfigVM_Task, 'duration_secs': 0.554331} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.292347] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 2d0b199f-e0f1-42e0-afb5-e08602aebf01/2d0b199f-e0f1-42e0-afb5-e08602aebf01.vmdk or device None with type streamOptimized {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 940.293974] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'encryption_secret_uuid': None, 'encryption_format': None, 'guest_format': None, 'disk_bus': None, 'size': 0, 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encrypted': False, 'image_id': 'a90c4a31-8bcc-48cf-ada7-7369ab14c460'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294760', 'volume_id': '834152ba-512a-44f5-b453-523da9f699e7', 'name': 'volume-834152ba-512a-44f5-b453-523da9f699e7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '2d0b199f-e0f1-42e0-afb5-e08602aebf01', 'attached_at': '', 'detached_at': '', 'volume_id': '834152ba-512a-44f5-b453-523da9f699e7', 'serial': '834152ba-512a-44f5-b453-523da9f699e7'}, 'attachment_id': 'd9075c39-55c5-4a93-b086-3378b4efdb94', 'guest_format': None, 'disk_bus': None, 'mount_device': '/dev/sdb', 'device_type': None, 'boot_index': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=61998) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 940.295018] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Volume attach. 
Driver type: vmdk {{(pid=61998) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 940.295018] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294760', 'volume_id': '834152ba-512a-44f5-b453-523da9f699e7', 'name': 'volume-834152ba-512a-44f5-b453-523da9f699e7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '2d0b199f-e0f1-42e0-afb5-e08602aebf01', 'attached_at': '', 'detached_at': '', 'volume_id': '834152ba-512a-44f5-b453-523da9f699e7', 'serial': '834152ba-512a-44f5-b453-523da9f699e7'} {{(pid=61998) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 940.295625] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c05948-be3f-4674-b6fc-90872b81495b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.312919] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d98475-08df-4014-a090-83b452bdd739 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.339874] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] volume-834152ba-512a-44f5-b453-523da9f699e7/volume-834152ba-512a-44f5-b453-523da9f699e7.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.343085] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2756487b-1156-429f-9864-e51ce7dfbb54 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.365442] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 940.365442] env[61998]: value = "task-1388799" [ 940.365442] env[61998]: _type = "Task" [ 940.365442] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.376560] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388799, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.379659] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388798, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100921} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.379916] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 940.380636] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcbe17a-41b1-4594-b123-a6e2d675162b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.403588] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 34143cac-64e9-41fd-a970-b593d1472d92/34143cac-64e9-41fd-a970-b593d1472d92.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.405383] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.405604] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7aadbebc-b2bd-42a8-a0d5-99f76ee5230b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.419358] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcd69018-d5ed-4ea9-8b31-ba2a5129f12c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.426167] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 940.426167] env[61998]: value = "task-1388800" [ 940.426167] env[61998]: _type = "Task" [ 940.426167] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.428464] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Waiting for the task: (returnval){ [ 940.428464] env[61998]: value = "task-1388801" [ 940.428464] env[61998]: _type = "Task" [ 940.428464] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.441305] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388800, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.459625] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.724s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.481837] env[61998]: INFO nova.scheduler.client.report [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleted allocations for instance 93df4e9a-29d2-4551-9bda-58b02163c116 [ 940.563945] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526ed5a4-76e8-330e-34f1-c2185f96c787, 'name': SearchDatastore_Task, 'duration_secs': 0.009352} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.564311] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.564554] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 940.564796] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.564948] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.565329] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.565419] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2618174b-1d8f-4992-a4ba-7a760c432e2e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.574898] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.575093] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 940.575773] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a24a9dd5-1207-4283-835b-b79952d64e43 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.580768] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 940.580768] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5203961a-05f9-a70c-0f44-25e7e52d03b8" [ 940.580768] env[61998]: _type = "Task" [ 940.580768] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.588700] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5203961a-05f9-a70c-0f44-25e7e52d03b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.772170] env[61998]: DEBUG nova.compute.manager [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Start spawning the instance on the hypervisor. 
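The lock/mkdir/SearchDatastore sequence above is the image-cache fast path: spawn serializes on the cached vmdk path, then only downloads from Glance when the datastore search says the file is missing, so concurrent spawns of the same image fetch it once. A compact sketch of that double-checked pattern; the two callables stand in for SearchDatastore_Task and the Glance download:

    # Double-checked fetch-if-missing, serialized on the cache path, e.g.
    # "[datastore1] devstack-image-cache_base/<id>/<id>.vmdk".
    from oslo_concurrency import lockutils

    def ensure_cached_image(cache_path, exists_fn, fetch_fn):
        with lockutils.lock(cache_path):
            if not exists_fn(cache_path):   # SearchDatastore_Task on the cache dir
                fetch_fn(cache_path)        # download from Glance exactly once
        return cache_path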
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 940.796449] env[61998]: DEBUG nova.virt.hardware [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 940.796710] env[61998]: DEBUG nova.virt.hardware [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 940.796916] env[61998]: DEBUG nova.virt.hardware [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 940.797134] env[61998]: DEBUG nova.virt.hardware [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 940.797306] env[61998]: DEBUG nova.virt.hardware [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 940.797508] env[61998]: DEBUG nova.virt.hardware [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 940.797735] env[61998]: DEBUG nova.virt.hardware [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 940.797904] env[61998]: DEBUG nova.virt.hardware [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 940.798093] 
env[61998]: DEBUG nova.virt.hardware [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 940.798268] env[61998]: DEBUG nova.virt.hardware [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 940.798446] env[61998]: DEBUG nova.virt.hardware [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 940.799317] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71b32c7-4d62-434e-9d66-34720ec200f0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.807600] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48609a5-3bc4-4e0a-a8c5-ebec2a51aa12 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.861058] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.863647] env[61998]: DEBUG oslo_concurrency.lockutils [None req-8d602eea-727e-4c8d-a4ed-ad72bd62402d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 4.364s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.864787] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.004s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.874722] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388799, 'name': ReconfigVM_Task, 'duration_secs': 0.3247} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.874991] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Reconfigured VM instance instance-00000042 to attach disk [datastore2] volume-834152ba-512a-44f5-b453-523da9f699e7/volume-834152ba-512a-44f5-b453-523da9f699e7.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 940.880453] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a94082b5-0278-4f4d-bcec-5145658abc44 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.897025] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 940.897025] env[61998]: value = "task-1388802" [ 940.897025] env[61998]: _type = "Task" [ 940.897025] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.908335] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388802, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.937851] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388800, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.946717] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388801, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.991285] env[61998]: DEBUG oslo_concurrency.lockutils [None req-ff5cc8fd-465d-45d9-9be0-14a755d8b4fa tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "93df4e9a-29d2-4551-9bda-58b02163c116" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 11.943s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.096907] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5203961a-05f9-a70c-0f44-25e7e52d03b8, 'name': SearchDatastore_Task, 'duration_secs': 0.023594} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.097818] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2358dab-6c57-46c3-93fb-32d088bf55e9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.103095] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 941.103095] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]525f21e0-060b-cf7c-b576-60bfe06eb8bc" [ 941.103095] env[61998]: _type = "Task" [ 941.103095] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.110960] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]525f21e0-060b-cf7c-b576-60bfe06eb8bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.220140] env[61998]: DEBUG nova.compute.manager [req-8ff06262-edab-4f71-83e5-711d11445356 req-04a3f212-58a5-4223-9aa6-24b230be571e service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received event network-vif-plugged-f4cf5059-51bc-4b7e-afa4-aab588228a6c {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 941.220429] env[61998]: DEBUG oslo_concurrency.lockutils [req-8ff06262-edab-4f71-83e5-711d11445356 req-04a3f212-58a5-4223-9aa6-24b230be571e service nova] Acquiring lock "58626303-4d70-48bb-9aaf-1b54cef92a76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.220931] env[61998]: DEBUG oslo_concurrency.lockutils [req-8ff06262-edab-4f71-83e5-711d11445356 req-04a3f212-58a5-4223-9aa6-24b230be571e service nova] Lock "58626303-4d70-48bb-9aaf-1b54cef92a76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.221153] env[61998]: DEBUG oslo_concurrency.lockutils [req-8ff06262-edab-4f71-83e5-711d11445356 req-04a3f212-58a5-4223-9aa6-24b230be571e service nova] Lock "58626303-4d70-48bb-9aaf-1b54cef92a76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.221335] env[61998]: DEBUG nova.compute.manager [req-8ff06262-edab-4f71-83e5-711d11445356 req-04a3f212-58a5-4223-9aa6-24b230be571e service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] No waiting events found dispatching network-vif-plugged-f4cf5059-51bc-4b7e-afa4-aab588228a6c {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 941.221562] env[61998]: WARNING nova.compute.manager [req-8ff06262-edab-4f71-83e5-711d11445356 req-04a3f212-58a5-4223-9aa6-24b230be571e service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received unexpected event network-vif-plugged-f4cf5059-51bc-4b7e-afa4-aab588228a6c for instance 
with vm_state building and task_state spawning. [ 941.358224] env[61998]: DEBUG nova.network.neutron [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Successfully updated port: f4cf5059-51bc-4b7e-afa4-aab588228a6c {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 941.369897] env[61998]: INFO nova.compute.manager [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Detaching volume 740a704c-fcec-455a-84c5-cd429ec2a7be [ 941.406574] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388802, 'name': ReconfigVM_Task, 'duration_secs': 0.150153} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.407532] env[61998]: INFO nova.virt.block_device [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Attempting to driver detach volume 740a704c-fcec-455a-84c5-cd429ec2a7be from mountpoint /dev/sdc [ 941.407751] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Volume detach. Driver type: vmdk {{(pid=61998) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 941.407938] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294765', 'volume_id': '740a704c-fcec-455a-84c5-cd429ec2a7be', 'name': 'volume-740a704c-fcec-455a-84c5-cd429ec2a7be', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e', 'attached_at': '', 'detached_at': '', 'volume_id': '740a704c-fcec-455a-84c5-cd429ec2a7be', 'serial': '740a704c-fcec-455a-84c5-cd429ec2a7be'} {{(pid=61998) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 941.408361] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294760', 'volume_id': '834152ba-512a-44f5-b453-523da9f699e7', 'name': 'volume-834152ba-512a-44f5-b453-523da9f699e7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '2d0b199f-e0f1-42e0-afb5-e08602aebf01', 'attached_at': '', 'detached_at': '', 'volume_id': '834152ba-512a-44f5-b453-523da9f699e7', 'serial': '834152ba-512a-44f5-b453-523da9f699e7'} {{(pid=61998) _attach_volume_vmdk 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 941.409509] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f513f25-d3fc-4454-9600-38bf448db606 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.411881] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0807a8f1-d0aa-4823-bc94-6cfb3057d654 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.435173] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d803f87-4dd0-48ae-aba5-b9b980c36a17 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.437551] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 941.437551] env[61998]: value = "task-1388803" [ 941.437551] env[61998]: _type = "Task" [ 941.437551] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.447848] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388800, 'name': PowerOffVM_Task, 'duration_secs': 0.725575} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.448835] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.449010] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance '43ff4071-05f5-4e5c-a46d-1ca6c99809f0' progress to 17 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 941.452643] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83eda5c-2001-4028-b1bb-2b1f4f134e24 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.460743] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388803, 'name': Rename_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.460977] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388801, 'name': ReconfigVM_Task, 'duration_secs': 0.877023} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.461586] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 34143cac-64e9-41fd-a970-b593d1472d92/34143cac-64e9-41fd-a970-b593d1472d92.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.462170] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63e50812-8682-413c-96c0-c794d8204f59 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.481358] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edaaf62d-d463-4349-aff7-663993047d2b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.485676] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Waiting for the task: (returnval){ [ 941.485676] env[61998]: value = "task-1388804" [ 941.485676] env[61998]: _type = "Task" [ 941.485676] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.499512] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] The volume has not been displaced from its original location: [datastore1] volume-740a704c-fcec-455a-84c5-cd429ec2a7be/volume-740a704c-fcec-455a-84c5-cd429ec2a7be.vmdk. No consolidation needed. {{(pid=61998) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 941.504885] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Reconfiguring VM instance instance-0000004c to detach disk 2002 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 941.506344] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f830f84-c3e3-48d6-9fdd-953da974a53d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.525162] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388804, 'name': Rename_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.530504] env[61998]: DEBUG oslo_vmware.api [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 941.530504] env[61998]: value = "task-1388805" [ 941.530504] env[61998]: _type = "Task" [ 941.530504] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.538757] env[61998]: DEBUG oslo_vmware.api [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388805, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.613591] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]525f21e0-060b-cf7c-b576-60bfe06eb8bc, 'name': SearchDatastore_Task, 'duration_secs': 0.008781} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.613933] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.614244] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] a67aa33f-c7ba-44da-bdfa-e0a53a8538ad/a67aa33f-c7ba-44da-bdfa-e0a53a8538ad.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 941.614544] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-335ede59-fc9b-49e6-b5a7-f2dadfcc5c5c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.620746] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 941.620746] env[61998]: value = "task-1388806" [ 941.620746] env[61998]: _type = "Task" [ 941.620746] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.629033] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388806, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.861274] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.861415] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.861616] env[61998]: DEBUG nova.network.neutron [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 941.948903] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388803, 'name': Rename_Task, 'duration_secs': 0.144784} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.949243] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 941.949541] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5b13730-0fdc-4c86-8e28-cc06a05f8086 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.960950] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 941.960950] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 941.961316] env[61998]: DEBUG nova.virt.hardware 
[None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.961370] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 941.961575] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.961673] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 941.961900] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 941.962084] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 941.962285] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 941.962468] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 941.962658] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 941.969023] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 941.969023] env[61998]: value = "task-1388807" [ 941.969023] env[61998]: _type = "Task" [ 941.969023] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.969389] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a91bee4-0154-4bb7-82c0-acbdc0ae9f8c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.990615] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388807, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.995265] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 941.995265] env[61998]: value = "task-1388808" [ 941.995265] env[61998]: _type = "Task" [ 941.995265] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.003458] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388804, 'name': Rename_Task, 'duration_secs': 0.141035} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.003757] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 942.004021] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2bfcf110-fdaa-47dc-b840-d982db2db266 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.010361] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388808, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.015779] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Waiting for the task: (returnval){ [ 942.015779] env[61998]: value = "task-1388809" [ 942.015779] env[61998]: _type = "Task" [ 942.015779] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.025884] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388809, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.045024] env[61998]: DEBUG oslo_vmware.api [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388805, 'name': ReconfigVM_Task, 'duration_secs': 0.230382} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.045024] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Reconfigured VM instance instance-0000004c to detach disk 2002 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 942.047232] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-193329e9-2c79-4f6b-af0c-4a2bb1946e31 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.064574] env[61998]: DEBUG oslo_vmware.api [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 942.064574] env[61998]: value = "task-1388810" [ 942.064574] env[61998]: _type = "Task" [ 942.064574] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.075066] env[61998]: DEBUG oslo_vmware.api [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388810, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.130764] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388806, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460933} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.131071] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] a67aa33f-c7ba-44da-bdfa-e0a53a8538ad/a67aa33f-c7ba-44da-bdfa-e0a53a8538ad.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 942.131296] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 942.131877] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5dc09e26-e233-43bd-a6f0-b4a1d06e5294 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.137843] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 942.137843] env[61998]: value = "task-1388811" [ 942.137843] env[61998]: _type = "Task" [ 942.137843] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.145303] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388811, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.396072] env[61998]: DEBUG nova.network.neutron [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 942.467907] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "c924f793-852e-4f45-85b1-b1e3fdc5d60d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.468227] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "c924f793-852e-4f45-85b1-b1e3fdc5d60d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.490680] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388807, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.507171] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388808, 'name': ReconfigVM_Task, 'duration_secs': 0.207174} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.507562] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance '43ff4071-05f5-4e5c-a46d-1ca6c99809f0' progress to 33 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 942.525511] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388809, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.574681] env[61998]: DEBUG oslo_vmware.api [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388810, 'name': ReconfigVM_Task, 'duration_secs': 0.229565} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.574852] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294765', 'volume_id': '740a704c-fcec-455a-84c5-cd429ec2a7be', 'name': 'volume-740a704c-fcec-455a-84c5-cd429ec2a7be', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e', 'attached_at': '', 'detached_at': '', 'volume_id': '740a704c-fcec-455a-84c5-cd429ec2a7be', 'serial': '740a704c-fcec-455a-84c5-cd429ec2a7be'} {{(pid=61998) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 942.580755] env[61998]: DEBUG nova.network.neutron [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updating instance_info_cache with network_info: [{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "address": "fa:16:3e:10:ca:da", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4cf5059-51", "ovs_interfaceid": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.648466] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388811, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065622} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.648811] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 942.649627] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a34f54d-e25a-480e-ac77-18669cefa4f9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.671824] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] a67aa33f-c7ba-44da-bdfa-e0a53a8538ad/a67aa33f-c7ba-44da-bdfa-e0a53a8538ad.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 942.672164] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f658c89a-4261-4378-892e-631d6a2c3f40 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.691128] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 942.691128] env[61998]: value = "task-1388812" [ 942.691128] env[61998]: _type = "Task" [ 942.691128] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.698935] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388812, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.971454] env[61998]: DEBUG nova.compute.manager [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 942.991696] env[61998]: DEBUG oslo_vmware.api [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388807, 'name': PowerOnVM_Task, 'duration_secs': 0.535453} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.991959] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.013724] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 943.014047] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 943.014244] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 943.014442] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 943.014595] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 943.014747] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 943.014992] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 943.015178] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 
tempest-ServerDiskConfigTestJSON-347226950-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 943.015353] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 943.015514] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 943.015687] env[61998]: DEBUG nova.virt.hardware [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 943.021423] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Reconfiguring VM instance instance-00000054 to detach disk 2000 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 943.022106] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc66e907-6c0c-4a29-939e-e45898fd1c5f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.044449] env[61998]: DEBUG oslo_vmware.api [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388809, 'name': PowerOnVM_Task, 'duration_secs': 0.737975} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.045623] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.045822] env[61998]: INFO nova.compute.manager [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Took 9.03 seconds to spawn the instance on the hypervisor. 
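
The task lifecycle traced throughout this log (Invoking VirtualMachine.*_Task -> "progress is N%" -> "completed successfully", with each poll logged by _poll_task) is oslo.vmware's standard invoke-and-poll pattern. Below is a minimal sketch of that pattern, assuming placeholder vCenter endpoint and credentials and a placeholder managed-object ID; it is not Nova's actual driver code.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder endpoint/credentials. Creating the session produces the
# "Logging into host" and "Successfully established new session" lines
# seen earlier in this log.
session = vmware_api.VMwareAPISession(
    'vc1.example.test', 'admin', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed-object reference for a VM ('vm-294760' is a placeholder
# shaped like the vm-* IDs above) and kick off a vCenter task.
vm_ref = vim_util.get_moref('vm-294760', 'VirtualMachine')
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task polls the task every task_poll_interval seconds (the
# "Task: {...} progress is N%" DEBUG lines) and returns once the task
# reaches the 'success' state, raising if it ends in error.
session.wait_for_task(task)
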
[ 943.046088] env[61998]: DEBUG nova.compute.manager [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 943.046435] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 943.046435] env[61998]: value = "task-1388813" [ 943.046435] env[61998]: _type = "Task" [ 943.046435] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.047118] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c21c203-da4a-4bcc-a2ae-bbe3b497d0ad {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.060591] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388813, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.083542] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.083902] env[61998]: DEBUG nova.compute.manager [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Instance network_info: |[{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "address": "fa:16:3e:10:ca:da", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4cf5059-51", "ovs_interfaceid": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 943.084373] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 
tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:ca:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92f3cfd6-c130-4390-8910-865fbc42afd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4cf5059-51bc-4b7e-afa4-aab588228a6c', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 943.093048] env[61998]: DEBUG oslo.service.loopingcall [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.093570] env[61998]: DEBUG nova.compute.manager [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 943.094584] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 943.095407] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8861162e-968f-4257-8e49-d120a40ab62f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.098249] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45b165e4-8cf0-42b5-8ef9-bffd35630a3d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.120661] env[61998]: DEBUG nova.objects.instance [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lazy-loading 'flavor' on Instance uuid 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.123009] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 943.123009] env[61998]: value = "task-1388814" [ 943.123009] env[61998]: _type = "Task" [ 943.123009] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.132713] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388814, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.201116] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388812, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.246131] env[61998]: DEBUG nova.compute.manager [req-1daf0357-958e-4671-80a4-f3bc629c5ae0 req-3b88fa7a-2453-4571-9ad1-05afcb05761e service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received event network-changed-f4cf5059-51bc-4b7e-afa4-aab588228a6c {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 943.246311] env[61998]: DEBUG nova.compute.manager [req-1daf0357-958e-4671-80a4-f3bc629c5ae0 req-3b88fa7a-2453-4571-9ad1-05afcb05761e service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Refreshing instance network info cache due to event network-changed-f4cf5059-51bc-4b7e-afa4-aab588228a6c. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 943.246596] env[61998]: DEBUG oslo_concurrency.lockutils [req-1daf0357-958e-4671-80a4-f3bc629c5ae0 req-3b88fa7a-2453-4571-9ad1-05afcb05761e service nova] Acquiring lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.246747] env[61998]: DEBUG oslo_concurrency.lockutils [req-1daf0357-958e-4671-80a4-f3bc629c5ae0 req-3b88fa7a-2453-4571-9ad1-05afcb05761e service nova] Acquired lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.246935] env[61998]: DEBUG nova.network.neutron [req-1daf0357-958e-4671-80a4-f3bc629c5ae0 req-3b88fa7a-2453-4571-9ad1-05afcb05761e service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Refreshing network info cache for port f4cf5059-51bc-4b7e-afa4-aab588228a6c {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 943.500475] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.500750] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.502376] env[61998]: INFO nova.compute.claims [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.560519] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388813, 'name': ReconfigVM_Task, 'duration_secs': 0.483081} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.560875] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Reconfigured VM instance instance-00000054 to detach disk 2000 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 943.566174] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bc7dfa-698e-4564-bfd5-66c370126b9c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.571342] env[61998]: INFO nova.compute.manager [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Took 25.40 seconds to build instance. [ 943.596243] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 43ff4071-05f5-4e5c-a46d-1ca6c99809f0/43ff4071-05f5-4e5c-a46d-1ca6c99809f0.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 943.597786] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fc46b03-187e-4cc5-8ab6-53b6f97896a1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.616035] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 943.616035] env[61998]: value = "task-1388815" [ 943.616035] env[61998]: _type = "Task" [ 943.616035] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.630058] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388815, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.637022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9c9b529c-f12b-475f-b975-e76603b0c51e tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 37.067s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.644140] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388814, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.701525] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388812, 'name': ReconfigVM_Task, 'duration_secs': 0.934202} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.701883] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Reconfigured VM instance instance-00000057 to attach disk [datastore1] a67aa33f-c7ba-44da-bdfa-e0a53a8538ad/a67aa33f-c7ba-44da-bdfa-e0a53a8538ad.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 943.702519] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08fe194d-a77b-4740-8e28-6fcc611da3a1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.709108] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 943.709108] env[61998]: value = "task-1388816" [ 943.709108] env[61998]: _type = "Task" [ 943.709108] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.716911] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388816, 'name': Rename_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.910625] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Acquiring lock "34143cac-64e9-41fd-a970-b593d1472d92" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.073414] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87a3acae-613d-4142-8009-5980f7000fb4 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Lock "34143cac-64e9-41fd-a970-b593d1472d92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.913s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.073712] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Lock "34143cac-64e9-41fd-a970-b593d1472d92" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.163s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.073939] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Acquiring lock "34143cac-64e9-41fd-a970-b593d1472d92-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.074183] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Lock "34143cac-64e9-41fd-a970-b593d1472d92-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.074367] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Lock "34143cac-64e9-41fd-a970-b593d1472d92-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.076521] env[61998]: INFO nova.compute.manager [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Terminating instance [ 944.078296] env[61998]: DEBUG nova.compute.manager [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}}
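The paired "Acquiring lock ... / Lock ... acquired ... waited 0.163s / ... 'released' ... held 26.913s" records above come from oslo.concurrency's lock wrapper, which times how long a caller waited for a named lock and how long it held it. A minimal sketch of the same pattern against the real library (the lock name is taken from the log; the print stands in for the wrapper's DEBUG lines):

```python
import time

from oslo_concurrency import lockutils

start = time.monotonic()
# lockutils.lock() is the context manager behind these DEBUG records;
# by default it is an in-process semaphore (pass external=True for a
# file-based lock shared across processes).
with lockutils.lock("34143cac-64e9-41fd-a970-b593d1472d92-events"):
    waited = time.monotonic() - start
    held_from = time.monotonic()
    # Critical section: e.g. clearing per-instance events.
print(f"waited {waited:.3f}s, held {time.monotonic() - held_from:.3f}s")
```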
[ 944.078504] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 944.079381] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c00801d-104e-41ee-910c-a2d13c20a257 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.086669] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 944.086894] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53fb50b1-272b-4000-b3e8-beee5424ab7c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.092301] env[61998]: DEBUG oslo_vmware.api [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Waiting for the task: (returnval){ [ 944.092301] env[61998]: value = "task-1388817" [ 944.092301] env[61998]: _type = "Task" [ 944.092301] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.100061] env[61998]: DEBUG oslo_vmware.api [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388817, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.124461] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388815, 'name': ReconfigVM_Task, 'duration_secs': 0.286779} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.124730] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 43ff4071-05f5-4e5c-a46d-1ca6c99809f0/43ff4071-05f5-4e5c-a46d-1ca6c99809f0.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.125054] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance '43ff4071-05f5-4e5c-a46d-1ca6c99809f0' progress to 50 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 944.135094] env[61998]: DEBUG nova.network.neutron [req-1daf0357-958e-4671-80a4-f3bc629c5ae0 req-3b88fa7a-2453-4571-9ad1-05afcb05761e service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updated VIF entry in instance network info cache for port f4cf5059-51bc-4b7e-afa4-aab588228a6c. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 944.135474] env[61998]: DEBUG nova.network.neutron [req-1daf0357-958e-4671-80a4-f3bc629c5ae0 req-3b88fa7a-2453-4571-9ad1-05afcb05761e service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updating instance_info_cache with network_info: [{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "address": "fa:16:3e:10:ca:da", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4cf5059-51", "ovs_interfaceid": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.140160] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a161ead3-ea76-47d4-b1a2-83be22b8e45d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.275s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.141764] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388814, 'name': CreateVM_Task, 'duration_secs': 0.547069} completed 
successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.142114] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 944.142857] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.143057] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.143466] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 944.143985] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bcee120-8916-4c52-8491-5e46505b0da0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.149715] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 944.149715] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]529cd7df-3072-ff64-ecc4-ef08d4971362" [ 944.149715] env[61998]: _type = "Task" [ 944.149715] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.159096] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529cd7df-3072-ff64-ecc4-ef08d4971362, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.218554] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388816, 'name': Rename_Task, 'duration_secs': 0.147371} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.218794] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 944.219063] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d6747f9-efc5-4a0a-b959-2e6183fb4af9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.225522] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 944.225522] env[61998]: value = "task-1388818" [ 944.225522] env[61998]: _type = "Task" [ 944.225522] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.234292] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388818, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.603442] env[61998]: DEBUG oslo_vmware.api [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388817, 'name': PowerOffVM_Task, 'duration_secs': 0.172171} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
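Each "Invoking VirtualMachine.PowerOnVM_Task ... / Waiting for the task ... / Task ... progress is N% ... completed successfully" sequence above is oslo.vmware's invoke-then-poll pattern: the SOAP call returns a Task managed object, and the session polls it until it reaches a terminal state. A minimal sketch against the real oslo.vmware API; the host, credentials and the "vm-1234" moref are placeholders, and in the driver the session already exists:

```python
from oslo_vmware import api, vim_util

# Placeholder connection details (the driver builds this once at startup).
session = api.VMwareAPISession(
    "vc1.example.test", "user", "secret",
    10,    # api_retry_count
    0.5)   # task_poll_interval: the cadence of the "progress is N%" lines

# Placeholder managed object reference for the VM being powered on.
vm_ref = vim_util.get_moref("vm-1234", "VirtualMachine")

# invoke_api() issues the SOAP request and returns the Task moref;
# wait_for_task() polls it and raises if the task ends in error.
task_ref = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
session.wait_for_task(task_ref)
```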
[ 944.605976] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 944.606222] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 944.606701] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9fec48b2-acf5-4c08-9cc3-d213799d2061 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.631827] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea1cda7-a832-4de9-a09c-bc3abcc13fb9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.634937] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c338ffe-b7a7-4261-8693-fcd6ac961736 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.657451] env[61998]: DEBUG oslo_concurrency.lockutils [req-1daf0357-958e-4671-80a4-f3bc629c5ae0 req-3b88fa7a-2453-4571-9ad1-05afcb05761e service nova] Releasing lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.658579] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e88d29-3628-47c1-97b1-56bada78bd48 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.665484] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21097493-ea92-4cc3-b0e5-16bb2176bc13 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.670945] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 944.671226] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 944.671383] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Deleting the datastore file [datastore1]
34143cac-64e9-41fd-a970-b593d1472d92 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.671632] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e52a2dae-f3f1-4004-83e9-20b796a3db6e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.711984] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance '43ff4071-05f5-4e5c-a46d-1ca6c99809f0' progress to 67 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 944.715407] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529cd7df-3072-ff64-ecc4-ef08d4971362, 'name': SearchDatastore_Task, 'duration_secs': 0.011882} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.718108] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.718108] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 944.718108] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.718108] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.718108] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 944.718322] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad111dd-e1be-4faa-a23c-9be47cde590b {{(pid=61998) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.721864] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2889dd7c-7b29-42b7-821d-f7f838288503 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.723682] env[61998]: DEBUG oslo_vmware.api [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Waiting for the task: (returnval){ [ 944.723682] env[61998]: value = "task-1388820" [ 944.723682] env[61998]: _type = "Task" [ 944.723682] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.734975] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8289bda4-cae5-49ce-aa1d-e29f8f2655a3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.742865] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 944.742865] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 944.743403] env[61998]: DEBUG oslo_vmware.api [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388820, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.744225] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1be74ee-30a3-44e9-ae52-2899f4ac1774 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.757449] env[61998]: DEBUG nova.compute.provider_tree [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.758918] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388818, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.765801] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 944.765801] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5253da15-a8ce-2a9c-1882-b3889cb5040a" [ 944.765801] env[61998]: _type = "Task" [ 944.765801] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.773787] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5253da15-a8ce-2a9c-1882-b3889cb5040a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.233773] env[61998]: DEBUG oslo_vmware.api [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Task: {'id': task-1388820, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204532} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.236680] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.236894] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 945.237103] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 945.237296] env[61998]: INFO nova.compute.manager [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Took 1.16 seconds to destroy the instance on the hypervisor. [ 945.237584] env[61998]: DEBUG oslo.service.loopingcall [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 945.238265] env[61998]: DEBUG nova.compute.manager [-] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 945.238265] env[61998]: DEBUG nova.network.neutron [-] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 945.244536] env[61998]: DEBUG oslo_vmware.api [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388818, 'name': PowerOnVM_Task, 'duration_secs': 0.666672} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.244806] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 945.245025] env[61998]: INFO nova.compute.manager [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Took 8.98 seconds to spawn the instance on the hypervisor. [ 945.245208] env[61998]: DEBUG nova.compute.manager [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 945.246031] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e39394-5071-47fb-b0a4-3392aa5bad09 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.254863] env[61998]: DEBUG nova.network.neutron [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Port 9f5103b5-d9cc-4978-9140-901834a6af10 binding to destination host cpu-1 is already ACTIVE {{(pid=61998) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 945.260931] env[61998]: DEBUG nova.scheduler.client.report [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 945.276458] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
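The "Inventory has not changed for provider ..." record above is the resource tracker confirming the provider's inventory in Placement. The schedulable capacity per resource class is (total - reserved) * allocation_ratio, so this inventory advertises 192 VCPU, 196078 MB of RAM and 400 GB of disk. Worked out directly from the logged dict:

```python
# Inventory as logged for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd
# (min_unit/max_unit/step_size omitted: they constrain request shapes,
# not total capacity).
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)
# VCPU 192.0
# MEMORY_MB 196078.0
# DISK_GB 400.0
```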
"0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.276723] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.276962] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.277177] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.277398] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.278923] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5253da15-a8ce-2a9c-1882-b3889cb5040a, 'name': SearchDatastore_Task, 'duration_secs': 0.009711} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.279394] env[61998]: INFO nova.compute.manager [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Terminating instance [ 945.281742] env[61998]: DEBUG nova.compute.manager [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 945.281960] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 945.282254] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e17a6e79-d5ba-4fce-96ac-50d1d2b8671d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.285445] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1e7b5b-b3d9-47ba-a573-6abf8f9b4459 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.293450] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.294349] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73cb4891-4665-4721-9dbe-d9744e3318a4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.296372] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 945.296372] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52791af6-4c50-c591-8a51-d7799e3b5b81" [ 945.296372] env[61998]: _type = "Task" [ 945.296372] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.301147] env[61998]: DEBUG oslo_vmware.api [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 945.301147] env[61998]: value = "task-1388821" [ 945.301147] env[61998]: _type = "Task" [ 945.301147] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.307698] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52791af6-4c50-c591-8a51-d7799e3b5b81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.312647] env[61998]: DEBUG oslo_vmware.api [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388821, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.495967] env[61998]: DEBUG nova.compute.manager [req-fd571d9c-3534-477e-b859-7ef52155e040 req-451ba639-f1ba-46fa-afce-7cf8753238d8 service nova] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Received event network-vif-deleted-420f3548-5f01-4335-970c-a39ffe789c13 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 945.496349] env[61998]: INFO nova.compute.manager [req-fd571d9c-3534-477e-b859-7ef52155e040 req-451ba639-f1ba-46fa-afce-7cf8753238d8 service nova] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Neutron deleted interface 420f3548-5f01-4335-970c-a39ffe789c13; detaching it from the instance and deleting it from the info cache [ 945.496656] env[61998]: DEBUG nova.network.neutron [req-fd571d9c-3534-477e-b859-7ef52155e040 req-451ba639-f1ba-46fa-afce-7cf8753238d8 service nova] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.770735] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.270s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.771400] env[61998]: DEBUG nova.compute.manager [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 945.773819] env[61998]: INFO nova.compute.manager [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Took 24.88 seconds to build instance. [ 945.811606] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52791af6-4c50-c591-8a51-d7799e3b5b81, 'name': SearchDatastore_Task, 'duration_secs': 0.012114} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.812375] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.812662] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 58626303-4d70-48bb-9aaf-1b54cef92a76/58626303-4d70-48bb-9aaf-1b54cef92a76.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 945.813202] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc4ddabd-10ea-4872-8fec-4f5e6f115e79 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.819072] env[61998]: DEBUG oslo_vmware.api [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388821, 'name': PowerOffVM_Task, 'duration_secs': 0.180535} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.819693] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.819893] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 945.820174] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c0e21df-ae16-4706-bd93-2d7790e8fb27 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.823212] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 945.823212] env[61998]: value = "task-1388822" [ 945.823212] env[61998]: _type = "Task" [ 945.823212] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.830860] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388822, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.884128] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 945.884442] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 945.884677] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Deleting the datastore file [datastore2] 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.885313] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09ba62a0-8a72-4be6-8e4d-63f034e92e03 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.891263] env[61998]: DEBUG oslo_vmware.api [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for the task: (returnval){ [ 945.891263] env[61998]: value = "task-1388824" [ 945.891263] env[61998]: _type = "Task" [ 945.891263] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.898951] env[61998]: DEBUG oslo_vmware.api [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388824, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.970136] env[61998]: DEBUG nova.network.neutron [-] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.000119] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d623f391-e49c-41d8-a6d0-5961b91357af {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.010427] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4fdcdcc-5511-49d2-af1b-3d1b4d11fbc0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.038135] env[61998]: DEBUG nova.compute.manager [req-fd571d9c-3534-477e-b859-7ef52155e040 req-451ba639-f1ba-46fa-afce-7cf8753238d8 service nova] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Detach interface failed, port_id=420f3548-5f01-4335-970c-a39ffe789c13, reason: Instance 34143cac-64e9-41fd-a970-b593d1472d92 could not be found. 
{{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 946.279519] env[61998]: DEBUG nova.compute.utils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 946.280743] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15295323-56a8-4a84-8858-4a92713b4007 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a67aa33f-c7ba-44da-bdfa-e0a53a8538ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.393s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.284159] env[61998]: DEBUG nova.compute.manager [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 946.284412] env[61998]: DEBUG nova.network.neutron [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 946.291015] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.291262] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.291440] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.335705] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388822, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.340929] env[61998]: DEBUG nova.policy [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8b17f109d724201a22264aa6ee02ca1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82b8854f80cf48628167fd6f678d7dd7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 946.374151] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 946.374646] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 946.374992] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Starting heal instance info cache {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10227}} [ 946.400632] env[61998]: DEBUG oslo_vmware.api [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Task: {'id': task-1388824, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.345509} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.400888] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.401186] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 946.401377] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 946.401551] env[61998]: INFO nova.compute.manager [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Took 1.12 seconds to destroy the instance on the hypervisor. 
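The PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task and CopyVirtualDisk_Task records above all follow one oslo.vmware pattern: a vSphere *_Task method is invoked, then the session polls the task, emitting a "progress is N%" record (api.py:434) on each round and a "completed successfully" record (api.py:444) at the end. A minimal sketch of that polling loop, for illustration only; poll() and its states are hypothetical stand-ins, not the real oslo_vmware.api code:

    import time

    def wait_for_task(poll, interval=0.5):
        # poll() returns (state, percent); state mirrors vSphere
        # TaskInfo.state: 'queued' | 'running' | 'success' | 'error'.
        while True:
            state, percent = poll()
            if state in ('queued', 'running'):
                print("progress is %d%%." % percent)   # cf. the api.py:434 records
                time.sleep(interval)
            elif state == 'success':
                print("completed successfully.")       # cf. the api.py:444 records
                return
            else:
                raise RuntimeError("task ended in state %r" % state)

    # The 0% -> 77% -> done progression of task-1388822 above, replayed:
    states = iter([('running', 0), ('running', 77), ('success', 100)])
    wait_for_task(lambda: next(states), interval=0)
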
[ 946.401791] env[61998]: DEBUG oslo.service.loopingcall [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.401986] env[61998]: DEBUG nova.compute.manager [-] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 946.402095] env[61998]: DEBUG nova.network.neutron [-] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 946.473016] env[61998]: INFO nova.compute.manager [-] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Took 1.23 seconds to deallocate network for instance. [ 946.593034] env[61998]: DEBUG nova.network.neutron [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Successfully created port: 12db2882-2081-4fca-b174-a9af8c543a13 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 946.756595] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.756848] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.782437] env[61998]: DEBUG nova.compute.manager [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 946.835926] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388822, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.591723} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.836235] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 58626303-4d70-48bb-9aaf-1b54cef92a76/58626303-4d70-48bb-9aaf-1b54cef92a76.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 946.836554] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 946.836905] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59967d87-ffbb-4f53-b6fb-f94581c927b8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.844772] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 946.844772] env[61998]: value = "task-1388825" [ 946.844772] env[61998]: _type = "Task" [ 946.844772] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.852319] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388825, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.928521] env[61998]: DEBUG nova.compute.manager [req-f176d715-0a99-43df-9728-ed9d08db4974 req-a5aa4a57-a84f-4c3e-b9c2-4088cef4471f service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Received event network-vif-deleted-2c555663-7a18-4eba-9038-f975654d0400 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 946.928789] env[61998]: INFO nova.compute.manager [req-f176d715-0a99-43df-9728-ed9d08db4974 req-a5aa4a57-a84f-4c3e-b9c2-4088cef4471f service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Neutron deleted interface 2c555663-7a18-4eba-9038-f975654d0400; detaching it from the instance and deleting it from the info cache [ 946.929049] env[61998]: DEBUG nova.network.neutron [req-f176d715-0a99-43df-9728-ed9d08db4974 req-a5aa4a57-a84f-4c3e-b9c2-4088cef4471f service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.980680] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.980888] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.981096] env[61998]: DEBUG nova.objects.instance [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Lazy-loading 'resources' on Instance uuid 34143cac-64e9-41fd-a970-b593d1472d92 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 947.260049] env[61998]: DEBUG nova.compute.manager [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 947.346397] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.346654] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.346878] env[61998]: DEBUG nova.network.neutron [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 947.356778] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388825, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077697} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.357009] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 947.357815] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0daa91b-e362-42f3-9fbf-99cd0a8e9572 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.380281] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 58626303-4d70-48bb-9aaf-1b54cef92a76/58626303-4d70-48bb-9aaf-1b54cef92a76.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 947.381741] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6081d2a0-6f6e-4c97-b566-59488118c1c7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.400389] env[61998]: DEBUG nova.network.neutron [-] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.403112] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: 
(returnval){ [ 947.403112] env[61998]: value = "task-1388826" [ 947.403112] env[61998]: _type = "Task" [ 947.403112] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.411429] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388826, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.432116] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64ba8899-f5b5-479c-9688-a81b47d96ae2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.441256] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5790a0f9-3553-4483-a046-9a23d9ca012e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.466788] env[61998]: DEBUG nova.compute.manager [req-f176d715-0a99-43df-9728-ed9d08db4974 req-a5aa4a57-a84f-4c3e-b9c2-4088cef4471f service nova] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Detach interface failed, port_id=2c555663-7a18-4eba-9038-f975654d0400, reason: Instance 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 947.612313] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c3eba3-b343-44b9-be01-4722ce9838b3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.621189] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a24b69-7605-4781-8183-ef02eb2d952c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.654876] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c722ca-3028-42f7-9286-ddcd7c210e17 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.663270] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafeba79-f659-4ef0-ae17-3acf2269e5d4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.676818] env[61998]: DEBUG nova.compute.provider_tree [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.780304] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.792684] env[61998]: DEBUG nova.compute.manager [None 
req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 947.820392] env[61998]: DEBUG nova.virt.hardware [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 947.820700] env[61998]: DEBUG nova.virt.hardware [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 947.820867] env[61998]: DEBUG nova.virt.hardware [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 947.821074] env[61998]: DEBUG nova.virt.hardware [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 947.821232] env[61998]: DEBUG nova.virt.hardware [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 947.821386] env[61998]: DEBUG nova.virt.hardware [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 947.821602] env[61998]: DEBUG nova.virt.hardware [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 947.821770] env[61998]: DEBUG nova.virt.hardware [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 
tempest-DeleteServersTestJSON-1983871495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 947.821939] env[61998]: DEBUG nova.virt.hardware [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 947.822118] env[61998]: DEBUG nova.virt.hardware [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 947.822303] env[61998]: DEBUG nova.virt.hardware [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 947.823211] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f9440e-a17d-4136-a07d-075501f2bec4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.831145] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11eab664-3c2e-4373-80c9-5c08b0a0146f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.902381] env[61998]: INFO nova.compute.manager [-] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Took 1.50 seconds to deallocate network for instance. [ 947.917536] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388826, 'name': ReconfigVM_Task, 'duration_secs': 0.450749} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.918642] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 58626303-4d70-48bb-9aaf-1b54cef92a76/58626303-4d70-48bb-9aaf-1b54cef92a76.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 947.922435] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f09d41a-b722-44b0-a99a-6c52db6b49be {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.929095] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 947.929095] env[61998]: value = "task-1388827" [ 947.929095] env[61998]: _type = "Task" [ 947.929095] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.937307] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388827, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.008012] env[61998]: DEBUG nova.compute.manager [req-fcc181f9-b419-40cf-bc4f-0c224ad78419 req-89619944-4fe2-4e7d-9ba4-86861d8c2f96 service nova] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Received event network-vif-plugged-12db2882-2081-4fca-b174-a9af8c543a13 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 948.008012] env[61998]: DEBUG oslo_concurrency.lockutils [req-fcc181f9-b419-40cf-bc4f-0c224ad78419 req-89619944-4fe2-4e7d-9ba4-86861d8c2f96 service nova] Acquiring lock "c924f793-852e-4f45-85b1-b1e3fdc5d60d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.008012] env[61998]: DEBUG oslo_concurrency.lockutils [req-fcc181f9-b419-40cf-bc4f-0c224ad78419 req-89619944-4fe2-4e7d-9ba4-86861d8c2f96 service nova] Lock "c924f793-852e-4f45-85b1-b1e3fdc5d60d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.008012] env[61998]: DEBUG oslo_concurrency.lockutils [req-fcc181f9-b419-40cf-bc4f-0c224ad78419 req-89619944-4fe2-4e7d-9ba4-86861d8c2f96 service nova] Lock "c924f793-852e-4f45-85b1-b1e3fdc5d60d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.008012] env[61998]: DEBUG nova.compute.manager [req-fcc181f9-b419-40cf-bc4f-0c224ad78419 req-89619944-4fe2-4e7d-9ba4-86861d8c2f96 service nova] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] No waiting events found dispatching network-vif-plugged-12db2882-2081-4fca-b174-a9af8c543a13 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 948.008012] env[61998]: WARNING nova.compute.manager [req-fcc181f9-b419-40cf-bc4f-0c224ad78419 req-89619944-4fe2-4e7d-9ba4-86861d8c2f96 service nova] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Received unexpected event network-vif-plugged-12db2882-2081-4fca-b174-a9af8c543a13 for instance with vm_state building and task_state spawning. 
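The burst of records above shows Nova's external-event plumbing end to end: Neutron reports network-vif-plugged-12db2882-2081-4fca-b174-a9af8c543a13, the compute manager serializes on the per-instance "c924f793...-events" lock, tries to pop a registered waiter for that event, finds none ("No waiting events found dispatching ..."), and downgrades to the WARNING because the instance is still building. A toy model of that pop-or-warn step; this is a deliberate simplification with hypothetical names, not nova.compute.manager.InstanceEvents itself:

    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
            self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

        def prepare(self, uuid, name):
            # Called by the spawning thread *before* it expects the event.
            ev = threading.Event()
            with self._lock:
                self._waiters[(uuid, name)] = ev
            return ev

        def pop_event(self, uuid, name):
            # Called by the event handler; cf. the _pop_event lock records above.
            with self._lock:
                return self._waiters.pop((uuid, name), None)

    def handle_external_event(events, uuid, name):
        waiter = events.pop_event(uuid, name)
        if waiter is None:
            print("No waiting events found dispatching %s" % name)
            print("WARNING: received unexpected event %s for instance %s" % (name, uuid))
        else:
            waiter.set()   # unblocks the thread waiting on this event

    # Nothing was prepared for this port's plug event, so this takes the WARNING path:
    events = InstanceEvents()
    handle_external_event(events, "c924f793-852e-4f45-85b1-b1e3fdc5d60d",
                          "network-vif-plugged-12db2882-2081-4fca-b174-a9af8c543a13")
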
[ 948.096782] env[61998]: DEBUG nova.network.neutron [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance_info_cache with network_info: [{"id": "9f5103b5-d9cc-4978-9140-901834a6af10", "address": "fa:16:3e:91:a2:96", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5103b5-d9", "ovs_interfaceid": "9f5103b5-d9cc-4978-9140-901834a6af10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.104657] env[61998]: DEBUG nova.network.neutron [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Successfully updated port: 12db2882-2081-4fca-b174-a9af8c543a13 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.180581] env[61998]: DEBUG nova.scheduler.client.report [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 948.412234] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.426910] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.427097] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquired lock 
"refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.427255] env[61998]: DEBUG nova.network.neutron [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Forcefully refreshing network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 948.438809] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388827, 'name': Rename_Task, 'duration_secs': 0.156163} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.439104] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 948.439340] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c8fe9e2-b625-42c4-8ef6-3a842ea4630b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.445594] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 948.445594] env[61998]: value = "task-1388828" [ 948.445594] env[61998]: _type = "Task" [ 948.445594] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.454566] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388828, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.599074] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.606690] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "refresh_cache-c924f793-852e-4f45-85b1-b1e3fdc5d60d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.606859] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "refresh_cache-c924f793-852e-4f45-85b1-b1e3fdc5d60d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.607028] env[61998]: DEBUG nova.network.neutron [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.685895] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.705s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.688271] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.908s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.689800] env[61998]: INFO nova.compute.claims [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 948.710822] env[61998]: INFO nova.scheduler.client.report [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Deleted allocations for instance 34143cac-64e9-41fd-a970-b593d1472d92 [ 948.957452] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388828, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.124404] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30b9702-bc9a-448d-bafa-3af9596399f7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.143156] env[61998]: DEBUG nova.network.neutron [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 949.145451] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070a96cd-4f40-4706-a852-52ccec7bbaa6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.152552] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance '43ff4071-05f5-4e5c-a46d-1ca6c99809f0' progress to 83 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 949.218703] env[61998]: DEBUG oslo_concurrency.lockutils [None req-87ff011f-6877-4cd2-bcb6-1b412a2f7934 tempest-InstanceActionsV221TestJSON-1026159100 tempest-InstanceActionsV221TestJSON-1026159100-project-member] Lock "34143cac-64e9-41fd-a970-b593d1472d92" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.145s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.304438] env[61998]: DEBUG nova.network.neutron [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Updating instance_info_cache with network_info: [{"id": "12db2882-2081-4fca-b174-a9af8c543a13", "address": "fa:16:3e:19:10:6d", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12db2882-20", "ovs_interfaceid": "12db2882-2081-4fca-b174-a9af8c543a13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.457226] env[61998]: DEBUG oslo_vmware.api [None 
req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388828, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.659472] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 949.659751] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8adee3d4-d84f-4373-af01-aa3c12923165 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.665988] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 949.665988] env[61998]: value = "task-1388829" [ 949.665988] env[61998]: _type = "Task" [ 949.665988] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.673494] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388829, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.684808] env[61998]: DEBUG nova.network.neutron [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updating instance_info_cache with network_info: [{"id": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "address": "fa:16:3e:95:c1:87", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2ff8f0-d7", "ovs_interfaceid": "da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.806874] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock 
"refresh_cache-c924f793-852e-4f45-85b1-b1e3fdc5d60d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.807199] env[61998]: DEBUG nova.compute.manager [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Instance network_info: |[{"id": "12db2882-2081-4fca-b174-a9af8c543a13", "address": "fa:16:3e:19:10:6d", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12db2882-20", "ovs_interfaceid": "12db2882-2081-4fca-b174-a9af8c543a13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 949.808177] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eadd956-cc3e-4dcd-bc53-89e6f1964c34 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.811079] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:10:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bec903a9-d773-4d7c-a80c-c2533be346fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12db2882-2081-4fca-b174-a9af8c543a13', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.821207] env[61998]: DEBUG oslo.service.loopingcall [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.821439] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 949.822103] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f1ecdcb-39c0-4386-9b93-34731b7e9ef9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.841486] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d723c88-a8d0-41b0-9143-8ec84cdb6512 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.845832] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.845832] env[61998]: value = "task-1388830" [ 949.845832] env[61998]: _type = "Task" [ 949.845832] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.877038] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1be3955-8311-45a6-b355-081a10d216cb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.883113] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388830, 'name': CreateVM_Task} progress is 15%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.888159] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a795ec-029b-411f-9886-1c939bd645f8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.901893] env[61998]: DEBUG nova.compute.provider_tree [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.959069] env[61998]: DEBUG oslo_vmware.api [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388828, 'name': PowerOnVM_Task, 'duration_secs': 1.036338} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.959496] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 949.959721] env[61998]: INFO nova.compute.manager [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Took 9.19 seconds to spawn the instance on the hypervisor. 
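Reading the task names for instance 58626303-4d70-48bb-9aaf-1b54cef92a76 back through the log gives the VMware driver's sparse-image spawn pipeline end to end: copy the cached image vmdk, extend the root disk to 1048576 (apparently KiB, i.e. a 1 GiB root disk), attach it via reconfigure, rename the VM, power it on; 9.19 seconds in total. Sketched below as a plain sequence for orientation; the task names are the ones actually logged, while spawn() and run_task are hypothetical stand-ins for the driver code:

    SPAWN_PIPELINE = [
        ("CopyVirtualDisk_Task",   "copy the cached image vmdk into the instance directory"),
        ("ExtendVirtualDisk_Task", "grow the root disk to the flavor's root_gb"),
        ("ReconfigVM_Task",        "attach the copied vmdk to the shell VM"),
        ("Rename_Task",            "give the VM its final name"),
        ("PowerOnVM_Task",         "power the instance on"),
    ]

    def spawn(run_task):
        # run_task stands in for "invoke the vSphere task and wait for it";
        # each call would produce the "progress is N%" records seen above.
        for task_name, purpose in SPAWN_PIPELINE:
            run_task(task_name, purpose)

    # Dry run with a stub runner that just echoes each step:
    spawn(lambda name, purpose: print(name, "-", purpose))
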
[ 949.959955] env[61998]: DEBUG nova.compute.manager [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 949.960897] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cfe2249-1e16-446e-b5af-c1966eacbbef {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.039682] env[61998]: DEBUG nova.compute.manager [req-6f3212e0-5806-4787-9736-9c4de637e012 req-972f1a91-3dc8-4ed9-9c13-77c9521d4889 service nova] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Received event network-changed-12db2882-2081-4fca-b174-a9af8c543a13 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 950.040009] env[61998]: DEBUG nova.compute.manager [req-6f3212e0-5806-4787-9736-9c4de637e012 req-972f1a91-3dc8-4ed9-9c13-77c9521d4889 service nova] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Refreshing instance network info cache due to event network-changed-12db2882-2081-4fca-b174-a9af8c543a13. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 950.040300] env[61998]: DEBUG oslo_concurrency.lockutils [req-6f3212e0-5806-4787-9736-9c4de637e012 req-972f1a91-3dc8-4ed9-9c13-77c9521d4889 service nova] Acquiring lock "refresh_cache-c924f793-852e-4f45-85b1-b1e3fdc5d60d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.040530] env[61998]: DEBUG oslo_concurrency.lockutils [req-6f3212e0-5806-4787-9736-9c4de637e012 req-972f1a91-3dc8-4ed9-9c13-77c9521d4889 service nova] Acquired lock "refresh_cache-c924f793-852e-4f45-85b1-b1e3fdc5d60d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.040712] env[61998]: DEBUG nova.network.neutron [req-6f3212e0-5806-4787-9736-9c4de637e012 req-972f1a91-3dc8-4ed9-9c13-77c9521d4889 service nova] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Refreshing network info cache for port 12db2882-2081-4fca-b174-a9af8c543a13 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 950.177953] env[61998]: DEBUG oslo_vmware.api [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388829, 'name': PowerOnVM_Task, 'duration_secs': 0.402245} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.178553] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 950.178553] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-61a5eed5-b35d-4394-a366-fe08e6683e2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance '43ff4071-05f5-4e5c-a46d-1ca6c99809f0' progress to 100 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 950.187253] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Releasing lock "refresh_cache-2d0b199f-e0f1-42e0-afb5-e08602aebf01" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.187253] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updated the network info_cache for instance {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10298}} [ 950.187253] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.187614] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.187614] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.187687] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.187882] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.188100] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.188192] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61998) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10846}} [ 950.188344] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.355922] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388830, 'name': CreateVM_Task, 'duration_secs': 0.324206} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.356241] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 950.357044] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.357287] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.357708] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 950.358055] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f066b73b-e0d4-4cfb-b8e2-5d4d0a602ef4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.363194] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 950.363194] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]529a53f1-39ce-d76b-da1e-6b6db6221e81" [ 950.363194] env[61998]: _type = "Task" [ 950.363194] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.372716] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529a53f1-39ce-d76b-da1e-6b6db6221e81, 'name': SearchDatastore_Task} progress is 0%. 
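Editor's note: the SearchDatastore_Task exchange above shows the wait/poll shape that recurs throughout this log: wait_for_task (api.py:397) logs the task handle, _poll_task logs "progress is N%" on each poll, and a final "completed successfully" entry carries the duration. A stdlib sketch of that loop, assuming a hypothetical get_task_info() callable returning (state, progress); the real oslo.vmware loop also handles richer error states and scheduling that are omitted here:

```python
import time

def wait_for_task(get_task_info, interval=0.5, timeout=60.0):
    """Poll a VMware-style task until it reaches a terminal state.

    get_task_info is a hypothetical callable returning (state, progress),
    where state is one of 'queued', 'running', 'success', 'error' -- a
    simplification of the TaskInfo object the vSphere API returns.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = get_task_info()
        if state == "success":
            return                      # "... completed successfully."
        if state == "error":
            raise RuntimeError("task failed")
        print(f"progress is {progress}%")  # logged on each poll above
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")
```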
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.409790] env[61998]: DEBUG nova.scheduler.client.report [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 950.481982] env[61998]: INFO nova.compute.manager [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Took 20.93 seconds to build instance. [ 950.690637] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.748901] env[61998]: DEBUG nova.network.neutron [req-6f3212e0-5806-4787-9736-9c4de637e012 req-972f1a91-3dc8-4ed9-9c13-77c9521d4889 service nova] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Updated VIF entry in instance network info cache for port 12db2882-2081-4fca-b174-a9af8c543a13. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.749330] env[61998]: DEBUG nova.network.neutron [req-6f3212e0-5806-4787-9736-9c4de637e012 req-972f1a91-3dc8-4ed9-9c13-77c9521d4889 service nova] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Updating instance_info_cache with network_info: [{"id": "12db2882-2081-4fca-b174-a9af8c543a13", "address": "fa:16:3e:19:10:6d", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12db2882-20", "ovs_interfaceid": "12db2882-2081-4fca-b174-a9af8c543a13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.873748] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 
tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]529a53f1-39ce-d76b-da1e-6b6db6221e81, 'name': SearchDatastore_Task, 'duration_secs': 0.009849} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.874218] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.874349] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.874732] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.874788] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.874946] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.875228] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9046db7a-6823-4486-b9c1-271559862099 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.888170] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.888363] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 950.889163] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a74d33a-f23f-472f-b1af-0ad7c52d554e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.894054] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 950.894054] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52fdd997-e6df-b53b-4a06-a93065846e16" [ 950.894054] env[61998]: _type = "Task" [ 950.894054] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.901832] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52fdd997-e6df-b53b-4a06-a93065846e16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.914748] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.226s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.915299] env[61998]: DEBUG nova.compute.manager [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Start building networks asynchronously for instance. 
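Editor's note: the lockutils entries here quote two durations per named lock: how long the claimant waited to acquire it and how long the holder kept it (the "compute_resources" lock is held 2.226s above, and the next claimant reports waiting 2.506s just below). An illustrative stdlib decorator that reproduces that instrumentation; this is a sketch of the logging pattern, not the actual lockutils implementation:

```python
import functools
import threading
import time

_locks = {}

def synchronized(name):
    """Illustrative stand-in for a named-lock decorator that logs
    'waited' and 'held' timings in the style of the entries above."""
    lock = _locks.setdefault(name, threading.Lock())

    def decorator(fn):
        @functools.wraps(fn)
        def inner(*args, **kwargs):
            t0 = time.monotonic()
            with lock:
                waited = time.monotonic() - t0
                print(f'Lock "{name}" acquired by "{fn.__name__}" :: waited {waited:.3f}s')
                t1 = time.monotonic()
                try:
                    return fn(*args, **kwargs)
                finally:
                    held = time.monotonic() - t1
                    print(f'Lock "{name}" "released" by "{fn.__name__}" :: held {held:.3f}s')
        return inner
    return decorator
```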
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 950.917916] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.506s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.918157] env[61998]: DEBUG nova.objects.instance [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lazy-loading 'resources' on Instance uuid 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.984469] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ada135a-fd67-490c-b172-fcdda1cbfa16 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "58626303-4d70-48bb-9aaf-1b54cef92a76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.440s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.251761] env[61998]: DEBUG oslo_concurrency.lockutils [req-6f3212e0-5806-4787-9736-9c4de637e012 req-972f1a91-3dc8-4ed9-9c13-77c9521d4889 service nova] Releasing lock "refresh_cache-c924f793-852e-4f45-85b1-b1e3fdc5d60d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.404829] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52fdd997-e6df-b53b-4a06-a93065846e16, 'name': SearchDatastore_Task, 'duration_secs': 0.034537} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.405656] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ae52b9c-7796-4b7e-a6c7-65a6cc947d7c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.411020] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 951.411020] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]528346dd-92cd-454c-14e9-e1946aa5abb7" [ 951.411020] env[61998]: _type = "Task" [ 951.411020] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.421074] env[61998]: DEBUG nova.compute.utils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 951.432264] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528346dd-92cd-454c-14e9-e1946aa5abb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.433661] env[61998]: DEBUG nova.compute.manager [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 951.437098] env[61998]: DEBUG nova.compute.manager [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 951.437280] env[61998]: DEBUG nova.network.neutron [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 951.520089] env[61998]: DEBUG nova.policy [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '744da696f7c64f62ae04195aa737fab4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c75c9b7c8d6b441d80fe512c37c88679', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 951.562292] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce25d6a7-fd62-4a55-9f1c-e9e78e3309df {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.570290] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945dcbbc-fa47-4e95-bdd2-35aa0aa03b1b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.607047] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4601c06d-9b99-45ac-b8c3-f1e6f6c2caf5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.615987] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-030aed53-246c-44d7-a107-c4218369442b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.630187] env[61998]: DEBUG nova.compute.provider_tree [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.825415] env[61998]: DEBUG nova.compute.manager [req-9f0c335b-be41-482a-8d31-bc551ca94535 req-005ffa0a-cbf9-4017-a29f-b08adfb3061d service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received event network-changed-f4cf5059-51bc-4b7e-afa4-aab588228a6c {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 951.825724] env[61998]: DEBUG nova.compute.manager [req-9f0c335b-be41-482a-8d31-bc551ca94535 req-005ffa0a-cbf9-4017-a29f-b08adfb3061d service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Refreshing instance network info cache due to event network-changed-f4cf5059-51bc-4b7e-afa4-aab588228a6c. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 951.828691] env[61998]: DEBUG oslo_concurrency.lockutils [req-9f0c335b-be41-482a-8d31-bc551ca94535 req-005ffa0a-cbf9-4017-a29f-b08adfb3061d service nova] Acquiring lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.828691] env[61998]: DEBUG oslo_concurrency.lockutils [req-9f0c335b-be41-482a-8d31-bc551ca94535 req-005ffa0a-cbf9-4017-a29f-b08adfb3061d service nova] Acquired lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.828691] env[61998]: DEBUG nova.network.neutron [req-9f0c335b-be41-482a-8d31-bc551ca94535 req-005ffa0a-cbf9-4017-a29f-b08adfb3061d service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Refreshing network info cache for port f4cf5059-51bc-4b7e-afa4-aab588228a6c {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 951.926110] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528346dd-92cd-454c-14e9-e1946aa5abb7, 'name': SearchDatastore_Task, 'duration_secs': 0.033506} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.926501] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.927062] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] c924f793-852e-4f45-85b1-b1e3fdc5d60d/c924f793-852e-4f45-85b1-b1e3fdc5d60d.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 951.927630] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7477aeff-056e-4431-b331-4be1f0c4fa78 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.937094] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 951.937094] env[61998]: value = "task-1388831" [ 951.937094] env[61998]: _type = "Task" [ 951.937094] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.952870] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388831, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.058788] env[61998]: DEBUG nova.network.neutron [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Successfully created port: 39cacd52-c03f-4a41-b9bf-6afdc250e017 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 952.133566] env[61998]: DEBUG nova.scheduler.client.report [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 952.158827] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.159170] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.159392] env[61998]: DEBUG nova.compute.manager [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Going to confirm migration 1 {{(pid=61998) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5082}} [ 952.450336] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388831, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.454863] env[61998]: DEBUG nova.compute.manager [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 952.492070] env[61998]: DEBUG nova.virt.hardware [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 952.492470] env[61998]: DEBUG nova.virt.hardware [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 952.492968] env[61998]: DEBUG nova.virt.hardware [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 952.493257] env[61998]: DEBUG nova.virt.hardware [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 952.493427] env[61998]: DEBUG nova.virt.hardware [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 952.493882] env[61998]: DEBUG nova.virt.hardware [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 952.494176] env[61998]: DEBUG nova.virt.hardware [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 952.494765] env[61998]: DEBUG nova.virt.hardware [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 952.495051] env[61998]: DEBUG nova.virt.hardware [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] 
Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 952.495256] env[61998]: DEBUG nova.virt.hardware [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 952.495446] env[61998]: DEBUG nova.virt.hardware [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 952.497090] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75d88e2-d5be-4e10-b348-52cbffb6bf80 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.510695] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564f3e94-3b4a-4a54-b0d3-9ecadad6edec {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.595217] env[61998]: DEBUG nova.network.neutron [req-9f0c335b-be41-482a-8d31-bc551ca94535 req-005ffa0a-cbf9-4017-a29f-b08adfb3061d service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updated VIF entry in instance network info cache for port f4cf5059-51bc-4b7e-afa4-aab588228a6c. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 952.595612] env[61998]: DEBUG nova.network.neutron [req-9f0c335b-be41-482a-8d31-bc551ca94535 req-005ffa0a-cbf9-4017-a29f-b08adfb3061d service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updating instance_info_cache with network_info: [{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "address": "fa:16:3e:10:ca:da", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4cf5059-51", "ovs_interfaceid": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.643696] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.726s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.646552] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.956s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.646795] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.647039] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61998) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 952.647915] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1172a646-0310-4683-ba73-babb610d2350 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.656650] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6bf279-8931-492b-82b5-f765aa1a83f0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.682542] env[61998]: INFO nova.scheduler.client.report [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Deleted allocations for instance 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e [ 952.682542] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd314c47-0303-4725-9eff-68c838e391fd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.696506] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912012e5-2360-44ea-99b9-3e8011cd7ae0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.729673] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180158MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61998) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 952.729673] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.729673] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.765210] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.765365] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.765550] env[61998]: DEBUG nova.network.neutron [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.765749] env[61998]: DEBUG nova.objects.instance [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lazy-loading 'info_cache' on Instance uuid 43ff4071-05f5-4e5c-a46d-1ca6c99809f0 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 952.956052] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388831, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67735} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.959025] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] c924f793-852e-4f45-85b1-b1e3fdc5d60d/c924f793-852e-4f45-85b1-b1e3fdc5d60d.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 952.959025] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 952.959025] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-edd6d18f-372c-4cfd-9032-19d499a2fce8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.965855] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 952.965855] env[61998]: value = "task-1388832" [ 952.965855] env[61998]: _type = "Task" [ 952.965855] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.981373] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388832, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.098058] env[61998]: DEBUG oslo_concurrency.lockutils [req-9f0c335b-be41-482a-8d31-bc551ca94535 req-005ffa0a-cbf9-4017-a29f-b08adfb3061d service nova] Releasing lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.195755] env[61998]: DEBUG oslo_concurrency.lockutils [None req-2ba673ff-642b-47df-b7f8-4712812ed12d tempest-AttachVolumeTestJSON-1102163902 tempest-AttachVolumeTestJSON-1102163902-project-member] Lock "0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.919s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.476427] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388832, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067381} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.477320] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.477695] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef791bd-9262-4c08-b284-5fef6b0eaf7b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.501276] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] c924f793-852e-4f45-85b1-b1e3fdc5d60d/c924f793-852e-4f45-85b1-b1e3fdc5d60d.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 953.501571] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8b48627-3e69-4d5d-a43b-c2cd549c450d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.520996] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 953.520996] env[61998]: value = "task-1388833" [ 953.520996] env[61998]: _type = "Task" [ 953.520996] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.529058] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388833, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.738058] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Applying migration context for instance 43ff4071-05f5-4e5c-a46d-1ca6c99809f0 as it has an incoming, in-progress migration 3ba573ea-8f31-4ccb-921a-24186b139c2d. Migration status is confirming {{(pid=61998) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 953.740510] env[61998]: INFO nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating resource usage from migration 3ba573ea-8f31-4ccb-921a-24186b139c2d [ 953.769428] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 2d0b199f-e0f1-42e0-afb5-e08602aebf01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.769593] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance a67aa33f-c7ba-44da-bdfa-e0a53a8538ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.769721] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Migration 3ba573ea-8f31-4ccb-921a-24186b139c2d is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 953.769871] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 43ff4071-05f5-4e5c-a46d-1ca6c99809f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.769960] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 58626303-4d70-48bb-9aaf-1b54cef92a76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.770086] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance c924f793-852e-4f45-85b1-b1e3fdc5d60d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.770202] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 953.770517] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 953.770517] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1920MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 953.789228] env[61998]: DEBUG nova.compute.manager [req-40d14805-0791-4e32-b8e4-b0fb97bd5c56 req-9dd4520f-f534-4e85-9e71-dd118664a9b9 service nova] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Received event network-vif-plugged-39cacd52-c03f-4a41-b9bf-6afdc250e017 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 953.789448] env[61998]: DEBUG oslo_concurrency.lockutils [req-40d14805-0791-4e32-b8e4-b0fb97bd5c56 req-9dd4520f-f534-4e85-9e71-dd118664a9b9 service nova] Acquiring lock "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.789655] env[61998]: DEBUG oslo_concurrency.lockutils [req-40d14805-0791-4e32-b8e4-b0fb97bd5c56 req-9dd4520f-f534-4e85-9e71-dd118664a9b9 service nova] Lock "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.789826] env[61998]: DEBUG oslo_concurrency.lockutils [req-40d14805-0791-4e32-b8e4-b0fb97bd5c56 req-9dd4520f-f534-4e85-9e71-dd118664a9b9 service nova] Lock "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.789995] env[61998]: DEBUG nova.compute.manager [req-40d14805-0791-4e32-b8e4-b0fb97bd5c56 req-9dd4520f-f534-4e85-9e71-dd118664a9b9 service nova] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] No waiting events found dispatching network-vif-plugged-39cacd52-c03f-4a41-b9bf-6afdc250e017 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 953.790194] env[61998]: WARNING nova.compute.manager [req-40d14805-0791-4e32-b8e4-b0fb97bd5c56 req-9dd4520f-f534-4e85-9e71-dd118664a9b9 service nova] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Received unexpected event network-vif-plugged-39cacd52-c03f-4a41-b9bf-6afdc250e017 for instance with vm_state building and task_state spawning. 
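Editor's note: the resource audit above can be cross-checked by hand. Seven allocations are listed (six instances plus the in-progress migration 3ba573ea), each with 1 GB disk and 1 vCPU; six carry 192 MB of RAM and the resizing instance 43ff4071 carries 256 MB. Adding the 512 MB MEMORY_MB reservation from the inventory data reported earlier reproduces the "Final resource view" figures exactly. A short check with the values copied from this log:

```python
# Re-derive the "Final resource view" figures from the per-allocation
# entries logged just above (values copied from this log).
RESERVED_MB = 512  # 'reserved' in the MEMORY_MB inventory reported earlier

allocations = [                      # (MEMORY_MB, DISK_GB, VCPU)
    (192, 1, 1),  # instance 2d0b199f-...
    (192, 1, 1),  # instance a67aa33f-...
    (192, 1, 1),  # migration 3ba573ea-... (incoming, in-progress resize)
    (256, 1, 1),  # instance 43ff4071-... (new flavor during resize)
    (192, 1, 1),  # instance 58626303-...
    (192, 1, 1),  # instance c924f793-...
    (192, 1, 1),  # instance 89b6f21c-...
]

used_ram = RESERVED_MB + sum(a[0] for a in allocations)
used_disk = sum(a[1] for a in allocations)
used_vcpus = sum(a[2] for a in allocations)

assert (used_ram, used_disk, used_vcpus) == (1920, 7, 7)
print(f"used_ram={used_ram}MB used_disk={used_disk}GB used_vcpus={used_vcpus}")
```

This matches "Total usable vcpus: 48, total allocated vcpus: 7" and "used_ram=1920MB ... used_disk=7GB ... used_vcpus=7" in the final resource view.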
[ 953.913495] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22785931-8151-477c-bb60-23186dd4362b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.923210] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48760f24-c3a4-4e38-8742-ed1c5d0fbb45 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.964316] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04945b6-676d-4862-96f2-7f8fca7ae64d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.973387] env[61998]: DEBUG nova.network.neutron [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Successfully updated port: 39cacd52-c03f-4a41-b9bf-6afdc250e017 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 953.979902] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b929d29-6be0-49fb-9988-8359230bcaea {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.994542] env[61998]: DEBUG nova.compute.provider_tree [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.031990] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388833, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.199266] env[61998]: DEBUG nova.network.neutron [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance_info_cache with network_info: [{"id": "9f5103b5-d9cc-4978-9140-901834a6af10", "address": "fa:16:3e:91:a2:96", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5103b5-d9", "ovs_interfaceid": "9f5103b5-d9cc-4978-9140-901834a6af10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.475207] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "refresh_cache-89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.475360] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "refresh_cache-89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.475511] env[61998]: DEBUG nova.network.neutron [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 954.499277] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 954.531393] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 
tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388833, 'name': ReconfigVM_Task, 'duration_secs': 0.752023} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.531669] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Reconfigured VM instance instance-00000059 to attach disk [datastore2] c924f793-852e-4f45-85b1-b1e3fdc5d60d/c924f793-852e-4f45-85b1-b1e3fdc5d60d.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 954.532341] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-319b3e3d-ad30-4266-901b-a737aa502d4b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.538364] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 954.538364] env[61998]: value = "task-1388835" [ 954.538364] env[61998]: _type = "Task" [ 954.538364] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.547207] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388835, 'name': Rename_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.702884] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "refresh_cache-43ff4071-05f5-4e5c-a46d-1ca6c99809f0" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.703236] env[61998]: DEBUG nova.objects.instance [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lazy-loading 'migration_context' on Instance uuid 43ff4071-05f5-4e5c-a46d-1ca6c99809f0 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 954.747058] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Acquiring lock "4ab6f2f2-07c8-4477-a433-b6408cd919bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.747305] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Lock "4ab6f2f2-07c8-4477-a433-b6408cd919bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.006518] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61998) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 955.006518] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.276s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.020132] env[61998]: DEBUG nova.network.neutron [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.050566] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388835, 'name': Rename_Task, 'duration_secs': 0.198426} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.050743] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 955.050999] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91d6543e-4a9d-4c11-ab00-bfcf9567bf94 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.059029] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 955.059029] env[61998]: value = "task-1388836" [ 955.059029] env[61998]: _type = "Task" [ 955.059029] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.067999] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388836, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.207875] env[61998]: DEBUG nova.objects.base [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Object Instance<43ff4071-05f5-4e5c-a46d-1ca6c99809f0> lazy-loaded attributes: info_cache,migration_context {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 955.208916] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084ce8b5-bc93-47d7-981d-4ca659804adb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.215818] env[61998]: DEBUG nova.network.neutron [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Updating instance_info_cache with network_info: [{"id": "39cacd52-c03f-4a41-b9bf-6afdc250e017", "address": "fa:16:3e:b8:00:75", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39cacd52-c0", "ovs_interfaceid": "39cacd52-c03f-4a41-b9bf-6afdc250e017", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.239033] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dacd010d-665b-401c-8dda-cbbc96f1f522 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.247310] env[61998]: DEBUG oslo_vmware.api [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 955.247310] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5228641d-ade7-f5f5-451a-2edb711e351a" [ 955.247310] env[61998]: _type = "Task" [ 955.247310] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.251463] env[61998]: DEBUG nova.compute.manager [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 955.260469] env[61998]: DEBUG oslo_vmware.api [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5228641d-ade7-f5f5-451a-2edb711e351a, 'name': SearchDatastore_Task, 'duration_secs': 0.007486} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.261849] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.262371] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.570284] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388836, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.720035] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "refresh_cache-89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.720035] env[61998]: DEBUG nova.compute.manager [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Instance network_info: |[{"id": "39cacd52-c03f-4a41-b9bf-6afdc250e017", "address": "fa:16:3e:b8:00:75", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39cacd52-c0", "ovs_interfaceid": "39cacd52-c03f-4a41-b9bf-6afdc250e017", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 955.720737] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:00:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39cacd52-c03f-4a41-b9bf-6afdc250e017', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 955.729409] env[61998]: DEBUG oslo.service.loopingcall [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.729576] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 955.729827] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97727b15-2964-4764-8a10-ad4173c9accd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.751098] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 955.751098] env[61998]: value = "task-1388837" [ 955.751098] env[61998]: _type = "Task" [ 955.751098] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.762156] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388837, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.782368] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.820240] env[61998]: DEBUG nova.compute.manager [req-bb7d633e-baaf-4815-85c8-f45ae202148c req-21fc7e2b-735f-4f4c-ba62-25646ad77ab8 service nova] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Received event network-changed-39cacd52-c03f-4a41-b9bf-6afdc250e017 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 955.820450] env[61998]: DEBUG nova.compute.manager [req-bb7d633e-baaf-4815-85c8-f45ae202148c req-21fc7e2b-735f-4f4c-ba62-25646ad77ab8 service nova] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Refreshing instance network info cache due to event network-changed-39cacd52-c03f-4a41-b9bf-6afdc250e017. 
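The Acquiring / Acquired / Releasing lines (for keys such as "refresh_cache-<uuid>" and "compute_resources") are oslo.concurrency's lockutils tracing a named-lock pattern: one lock object per string key, held across the critical section. A sketch of that idea using only the standard library; named_lock and the lock table are hypothetical stand-ins, not lockutils itself:

    import threading
    from collections import defaultdict
    from contextlib import contextmanager

    _locks = defaultdict(threading.Lock)  # one lock per name, created on demand

    @contextmanager
    def named_lock(name):
        print(f'Acquiring lock "{name}"')
        with _locks[name]:
            print(f'Lock "{name}" acquired')
            yield
        print(f'Lock "{name}" released')

    # Usage, matching the cache-refresh pattern in the entries above:
    with named_lock("refresh_cache-89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1"):
        pass  # rebuild the instance's network info cache while holding the lock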
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 955.820666] env[61998]: DEBUG oslo_concurrency.lockutils [req-bb7d633e-baaf-4815-85c8-f45ae202148c req-21fc7e2b-735f-4f4c-ba62-25646ad77ab8 service nova] Acquiring lock "refresh_cache-89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.820814] env[61998]: DEBUG oslo_concurrency.lockutils [req-bb7d633e-baaf-4815-85c8-f45ae202148c req-21fc7e2b-735f-4f4c-ba62-25646ad77ab8 service nova] Acquired lock "refresh_cache-89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.820995] env[61998]: DEBUG nova.network.neutron [req-bb7d633e-baaf-4815-85c8-f45ae202148c req-21fc7e2b-735f-4f4c-ba62-25646ad77ab8 service nova] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Refreshing network info cache for port 39cacd52-c03f-4a41-b9bf-6afdc250e017 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 955.894764] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ec536a-4a1a-4a5c-957f-bf7027fca09f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.904158] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7383e499-3f0a-4ab6-b9bd-9b7e7c61ae56 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.934963] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318d8a0c-9914-437d-bf08-cf45de1955b1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.942598] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38ff7f0-e4a0-4051-9228-e87b20abb75d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.957446] env[61998]: DEBUG nova.compute.provider_tree [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.070990] env[61998]: DEBUG oslo_vmware.api [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388836, 'name': PowerOnVM_Task, 'duration_secs': 0.575208} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.071334] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 956.071497] env[61998]: INFO nova.compute.manager [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Took 8.28 seconds to spawn the instance on the hypervisor. [ 956.071713] env[61998]: DEBUG nova.compute.manager [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 956.072550] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6eb519-fc38-46f3-9420-9a6e8ab3e567 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.262661] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388837, 'name': CreateVM_Task, 'duration_secs': 0.349037} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.262956] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 956.263837] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.263837] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.264288] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 956.264518] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3761cd35-2e1f-43b1-a9e7-b6fb5f8a2e7a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.269779] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 956.269779] env[61998]: value = 
"session[523a9ed6-b255-d82a-34e3-504b542807f6]5292ede0-82c2-3479-126a-cd2ee36d685c" [ 956.269779] env[61998]: _type = "Task" [ 956.269779] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.277556] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5292ede0-82c2-3479-126a-cd2ee36d685c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.368469] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.368703] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.460486] env[61998]: DEBUG nova.scheduler.client.report [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 956.568952] env[61998]: DEBUG nova.network.neutron [req-bb7d633e-baaf-4815-85c8-f45ae202148c req-21fc7e2b-735f-4f4c-ba62-25646ad77ab8 service nova] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Updated VIF entry in instance network info cache for port 39cacd52-c03f-4a41-b9bf-6afdc250e017. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 956.569575] env[61998]: DEBUG nova.network.neutron [req-bb7d633e-baaf-4815-85c8-f45ae202148c req-21fc7e2b-735f-4f4c-ba62-25646ad77ab8 service nova] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Updating instance_info_cache with network_info: [{"id": "39cacd52-c03f-4a41-b9bf-6afdc250e017", "address": "fa:16:3e:b8:00:75", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39cacd52-c0", "ovs_interfaceid": "39cacd52-c03f-4a41-b9bf-6afdc250e017", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.599257] env[61998]: INFO nova.compute.manager [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Took 13.12 seconds to build instance. [ 956.780034] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5292ede0-82c2-3479-126a-cd2ee36d685c, 'name': SearchDatastore_Task, 'duration_secs': 0.015049} completed successfully. 
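The network_info blobs in the cache-update entries above are JSON-serializable lists of VIF dicts. Pulling the useful fields out of one (the literal below is a trimmed copy of the port-39cacd52 entry exactly as logged):

    import json

    network_info = json.loads('''[{"id": "39cacd52-c03f-4a41-b9bf-6afdc250e017",
      "address": "fa:16:3e:b8:00:75",
      "network": {"subnets": [{"cidr": "192.168.128.0/28",
        "ips": [{"address": "192.168.128.4"}]}]},
      "devname": "tap39cacd52-c0"}]''')

    for vif in network_info:
        ip = vif["network"]["subnets"][0]["ips"][0]["address"]
        print(vif["devname"], vif["address"], ip)
    # tap39cacd52-c0 fa:16:3e:b8:00:75 192.168.128.4

Note that the devname is evidently "tap" plus the first 11 characters of the port UUID, which is how a tap device can be matched back to its Neutron port.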
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.780340] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.780583] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 956.780819] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.780970] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.781204] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 956.781462] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e70c1ea-4fe9-4c0a-8c5c-f04804abf85a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.790613] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 956.790783] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 956.791475] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fac322f3-396f-4c4d-a89a-5e35b998e908 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.796294] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 956.796294] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5228ce70-3108-a1dc-efe7-c67eb55973f7" [ 956.796294] env[61998]: _type = "Task" [ 956.796294] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.804016] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5228ce70-3108-a1dc-efe7-c67eb55973f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.875931] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.876131] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Starting heal instance info cache {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10227}} [ 957.072405] env[61998]: DEBUG oslo_concurrency.lockutils [req-bb7d633e-baaf-4815-85c8-f45ae202148c req-21fc7e2b-735f-4f4c-ba62-25646ad77ab8 service nova] Releasing lock "refresh_cache-89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.100182] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fecbb7eb-d1ee-4444-b852-93a570579b70 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "c924f793-852e-4f45-85b1-b1e3fdc5d60d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.632s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.177546] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07d42d3-dd3a-44f2-96bc-a6f5de067a4a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.186529] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8d376d3e-9ccc-4b73-9507-17b93d1f08e0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Suspending the VM {{(pid=61998) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 957.187088] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-60629f13-bf3f-40e3-add3-0096eaa85946 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.194728] env[61998]: 
DEBUG oslo_vmware.api [None req-8d376d3e-9ccc-4b73-9507-17b93d1f08e0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 957.194728] env[61998]: value = "task-1388839" [ 957.194728] env[61998]: _type = "Task" [ 957.194728] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.207850] env[61998]: DEBUG oslo_vmware.api [None req-8d376d3e-9ccc-4b73-9507-17b93d1f08e0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388839, 'name': SuspendVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.306788] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5228ce70-3108-a1dc-efe7-c67eb55973f7, 'name': SearchDatastore_Task, 'duration_secs': 0.009129} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.307915] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c06bd86-fb5e-4ea6-a8a8-1e6ce0953698 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.313772] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 957.313772] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52c76553-e802-1fd8-bfdc-5c3838d53249" [ 957.313772] env[61998]: _type = "Task" [ 957.313772] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.322690] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c76553-e802-1fd8-bfdc-5c3838d53249, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.379406] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Didn't find any instances for network info cache update. 
{{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 957.379681] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.379850] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.380018] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.380160] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61998) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10846}} [ 957.470316] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.208s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.473833] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.692s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.475442] env[61998]: INFO nova.compute.claims [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.704951] env[61998]: DEBUG oslo_vmware.api [None req-8d376d3e-9ccc-4b73-9507-17b93d1f08e0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388839, 'name': SuspendVM_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.825054] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c76553-e802-1fd8-bfdc-5c3838d53249, 'name': SearchDatastore_Task, 'duration_secs': 0.056607} completed successfully. 
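The run_periodic_tasks entries show the compute manager's housekeeping loop: each ComputeManager._* task is invoked in turn, and a task may no-op based on config, as _reclaim_queued_deletes does here because reclaim_instance_interval <= 0. A condensed, runnable sketch of that dispatch (single pass, hypothetical names; the real oslo.service scheduler adds per-task spacing and error isolation):

    RECLAIM_INSTANCE_INTERVAL = 0  # seconds; <= 0 disables reclaim, as logged

    def reclaim_queued_deletes():
        if RECLAIM_INSTANCE_INTERVAL <= 0:
            print("CONF.reclaim_instance_interval <= 0, skipping...")
            return
        # otherwise: purge SOFT_DELETED instances older than the interval

    def run_periodic_tasks(tasks):
        for task in tasks:
            print(f"Running periodic task {task.__name__}")
            task()

    run_periodic_tasks([reclaim_queued_deletes])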
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.825054] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.825319] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1/89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 957.825592] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98dc0464-46e5-4170-917a-3c889e731e63 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.833033] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 957.833033] env[61998]: value = "task-1388840" [ 957.833033] env[61998]: _type = "Task" [ 957.833033] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.841042] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388840, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.043997] env[61998]: INFO nova.scheduler.client.report [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted allocation for migration 3ba573ea-8f31-4ccb-921a-24186b139c2d [ 958.208206] env[61998]: DEBUG oslo_vmware.api [None req-8d376d3e-9ccc-4b73-9507-17b93d1f08e0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388839, 'name': SuspendVM_Task, 'duration_secs': 0.768592} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.208289] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-8d376d3e-9ccc-4b73-9507-17b93d1f08e0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Suspended the VM {{(pid=61998) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 958.208523] env[61998]: DEBUG nova.compute.manager [None req-8d376d3e-9ccc-4b73-9507-17b93d1f08e0 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 958.209535] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8360540-61b6-4de4-a890-efdcad0ba86f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.343093] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388840, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46744} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.343405] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1/89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 958.343650] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 958.343953] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f95c1b18-268f-43a7-902b-ad18272d7e77 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.350508] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 958.350508] env[61998]: value = "task-1388841" [ 958.350508] env[61998]: _type = "Task" [ 958.350508] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.358089] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388841, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.551856] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a35c87bd-4fd9-4203-a19f-a751f8cbae35 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.393s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.603465] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afe617f-95b8-4a96-82da-427412af1be8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.613091] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfe95a2-6efb-4d93-b1f0-f92ad9b2bc79 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.661549] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13163818-3452-4ba4-8dda-c5dec931638f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.669315] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e1be54-f741-492b-a35f-0cc4c02c5632 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.684815] env[61998]: DEBUG nova.compute.provider_tree [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.861256] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388841, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062514} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.861256] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 958.861422] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d8e553-188e-4857-9c67-d9fa79f1d97f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.884362] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1/89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.884715] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e393e13a-b81e-4866-9912-1356d19a853d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.907194] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 958.907194] env[61998]: value = "task-1388842" [ 958.907194] env[61998]: _type = "Task" [ 958.907194] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.915334] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388842, 'name': ReconfigVM_Task} progress is 5%. 
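The 956.78-958.91 entries trace the image-cache-backed spawn path for instance 89b6f21c: locate the cached VMDK on the datastore, copy it into the instance directory, extend the root disk to the flavor size (the logged 1048576 appears to be in KB, i.e. a 1 GiB root disk), then reconfigure the VM to attach it. Condensed into stub functions whose names are descriptive stand-ins for nova's vm_util/volumeops helpers:

    def copy_virtual_disk(src, dst):
        print(f"Copying Virtual Disk {src} to {dst}")       # CopyVirtualDisk_Task

    def extend_virtual_disk(path, size_kb):
        print(f"Extending root virtual disk to {size_kb}")  # ExtendVirtualDisk_Task

    def attach_disk_to_vm(vm, path):
        print(f"Reconfiguring {vm} to attach disk {path}")  # ReconfigVM_Task

    def spawn_from_image_cache(ds, image_id, vm, root_size_kb):
        cached = f"[{ds}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        root = f"[{ds}] {vm}/{vm}.vmdk"
        copy_virtual_disk(cached, root)
        extend_virtual_disk(root, root_size_kb)
        attach_disk_to_vm(vm, root)

    spawn_from_image_cache("datastore2",
                           "a90c4a31-8bcc-48cf-ada7-7369ab14c460",
                           "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1",
                           1048576)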
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.088140] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.088414] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.088625] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.088812] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.089026] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.091702] env[61998]: INFO nova.compute.manager [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Terminating instance [ 959.093627] env[61998]: DEBUG nova.compute.manager [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 959.093825] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.094883] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2480fd2-ad7b-4239-b4c2-414b5a7e3a8f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.102943] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.103211] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87303010-ce3c-4b10-8e60-c5657faf172c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.109631] env[61998]: DEBUG oslo_vmware.api [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 959.109631] env[61998]: value = "task-1388843" [ 959.109631] env[61998]: _type = "Task" [ 959.109631] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.118416] env[61998]: DEBUG oslo_vmware.api [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388843, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.187925] env[61998]: DEBUG nova.scheduler.client.report [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 959.417022] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388842, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.538437] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "5789b2bc-a8c5-4986-bb53-7175cd566142" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.538719] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "5789b2bc-a8c5-4986-bb53-7175cd566142" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.568548] env[61998]: DEBUG oslo_concurrency.lockutils [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "c924f793-852e-4f45-85b1-b1e3fdc5d60d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.568864] env[61998]: DEBUG oslo_concurrency.lockutils [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "c924f793-852e-4f45-85b1-b1e3fdc5d60d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.569126] env[61998]: DEBUG oslo_concurrency.lockutils [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "c924f793-852e-4f45-85b1-b1e3fdc5d60d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.569318] env[61998]: DEBUG oslo_concurrency.lockutils [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "c924f793-852e-4f45-85b1-b1e3fdc5d60d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.569489] env[61998]: DEBUG oslo_concurrency.lockutils [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "c924f793-852e-4f45-85b1-b1e3fdc5d60d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.571747] env[61998]: INFO nova.compute.manager [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] 
[instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Terminating instance [ 959.573583] env[61998]: DEBUG nova.compute.manager [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 959.573790] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.574645] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f730af6-f44b-40bd-a785-25fbaab39a2b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.582510] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 959.582739] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b73ab3c-022f-47b9-8457-ad8f26dbdf0c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.619021] env[61998]: DEBUG oslo_vmware.api [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388843, 'name': PowerOffVM_Task, 'duration_secs': 0.266262} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.619220] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 959.619436] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 959.619697] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eda73e3f-ae10-4207-8258-dfe4cb055576 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.659498] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 959.659720] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 959.659888] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleting the datastore file [datastore2] c924f793-852e-4f45-85b1-b1e3fdc5d60d {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 959.660199] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c825144c-2a2e-4f10-bdc2-98324f11cc23 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.666669] env[61998]: DEBUG oslo_vmware.api [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 959.666669] env[61998]: value = "task-1388846" [ 959.666669] env[61998]: _type = "Task" [ 959.666669] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.675212] env[61998]: DEBUG oslo_vmware.api [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388846, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.686044] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 959.686402] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 959.686486] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleting the datastore file [datastore1] 43ff4071-05f5-4e5c-a46d-1ca6c99809f0 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 959.686698] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e50709ba-07c0-4e58-8b64-0b4dd51953a5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.693242] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.219s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.693745] env[61998]: DEBUG nova.compute.manager [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 959.696737] env[61998]: DEBUG oslo_vmware.api [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 959.696737] env[61998]: value = "task-1388847" [ 959.696737] env[61998]: _type = "Task" [ 959.696737] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.704665] env[61998]: DEBUG oslo_vmware.api [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388847, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.918200] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388842, 'name': ReconfigVM_Task, 'duration_secs': 0.749237} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.918529] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1/89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 959.919182] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-119a29f4-fe3f-41bc-b7ff-ef5fdea9be28 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.925170] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 959.925170] env[61998]: value = "task-1388848" [ 959.925170] env[61998]: _type = "Task" [ 959.925170] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.934419] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388848, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.041050] env[61998]: DEBUG nova.compute.manager [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 960.177074] env[61998]: DEBUG oslo_vmware.api [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388846, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181559} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.177358] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 960.177551] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 960.177735] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 960.178018] env[61998]: INFO nova.compute.manager [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Took 0.60 seconds to destroy the instance on the hypervisor. [ 960.178252] env[61998]: DEBUG oslo.service.loopingcall [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.178478] env[61998]: DEBUG nova.compute.manager [-] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 960.178573] env[61998]: DEBUG nova.network.neutron [-] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 960.198870] env[61998]: DEBUG nova.compute.utils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 960.200882] env[61998]: DEBUG nova.compute.manager [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 960.201190] env[61998]: DEBUG nova.network.neutron [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 960.215923] env[61998]: DEBUG oslo_vmware.api [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388847, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142073} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.217383] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 960.217586] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 960.217770] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 960.217986] env[61998]: INFO nova.compute.manager [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Took 1.12 seconds to destroy the instance on the hypervisor. [ 960.218255] env[61998]: DEBUG oslo.service.loopingcall [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.218733] env[61998]: DEBUG nova.compute.manager [-] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 960.218850] env[61998]: DEBUG nova.network.neutron [-] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 960.320670] env[61998]: DEBUG nova.policy [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c162eeb76c2644008ccb73f1b9d66797', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b03f93634e114614937ca5c07c961174', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 960.440284] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388848, 'name': Rename_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.465792] env[61998]: DEBUG nova.compute.manager [req-cb543cfa-f833-45a3-af13-f7ec0bc00f96 req-076dbb9e-b604-425d-aa8c-05657c942170 service nova] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Received event network-vif-deleted-12db2882-2081-4fca-b174-a9af8c543a13 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 960.466010] env[61998]: INFO nova.compute.manager [req-cb543cfa-f833-45a3-af13-f7ec0bc00f96 req-076dbb9e-b604-425d-aa8c-05657c942170 service nova] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Neutron deleted interface 12db2882-2081-4fca-b174-a9af8c543a13; detaching it from the instance and deleting it from the info cache [ 960.466733] env[61998]: DEBUG nova.network.neutron [req-cb543cfa-f833-45a3-af13-f7ec0bc00f96 req-076dbb9e-b604-425d-aa8c-05657c942170 service nova] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.569424] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.569610] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.578020] env[61998]: INFO nova.compute.claims 
[None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.706442] env[61998]: DEBUG nova.compute.manager [req-c684accf-35a7-4e73-8da2-eacbbc21e909 req-22df7588-2b32-43c3-bc8a-36af77fa0daa service nova] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Received event network-vif-deleted-9f5103b5-d9cc-4978-9140-901834a6af10 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 960.706442] env[61998]: INFO nova.compute.manager [req-c684accf-35a7-4e73-8da2-eacbbc21e909 req-22df7588-2b32-43c3-bc8a-36af77fa0daa service nova] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Neutron deleted interface 9f5103b5-d9cc-4978-9140-901834a6af10; detaching it from the instance and deleting it from the info cache [ 960.706442] env[61998]: DEBUG nova.network.neutron [req-c684accf-35a7-4e73-8da2-eacbbc21e909 req-22df7588-2b32-43c3-bc8a-36af77fa0daa service nova] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.709094] env[61998]: DEBUG nova.compute.manager [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 960.798837] env[61998]: DEBUG nova.network.neutron [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Successfully created port: 74a584ad-09cd-42a1-bd2b-3262ccdfb78d {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 960.938134] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388848, 'name': Rename_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.941455] env[61998]: DEBUG nova.network.neutron [-] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.968668] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45a32dbb-3036-42bc-b361-6f593c6440c3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.978402] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10d7e76-5894-47c6-bfc1-4523223f65c3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.008774] env[61998]: DEBUG nova.compute.manager [req-cb543cfa-f833-45a3-af13-f7ec0bc00f96 req-076dbb9e-b604-425d-aa8c-05657c942170 service nova] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Detach interface failed, port_id=12db2882-2081-4fca-b174-a9af8c543a13, reason: Instance c924f793-852e-4f45-85b1-b1e3fdc5d60d could not be found. 
{{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 961.178097] env[61998]: DEBUG nova.network.neutron [-] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.208323] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59276f27-08f6-4e2c-8887-d09bec3ab279 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.220742] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f388f0-94f0-4ee8-82c2-7e82304443cd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.248686] env[61998]: DEBUG nova.compute.manager [req-c684accf-35a7-4e73-8da2-eacbbc21e909 req-22df7588-2b32-43c3-bc8a-36af77fa0daa service nova] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Detach interface failed, port_id=9f5103b5-d9cc-4978-9140-901834a6af10, reason: Instance 43ff4071-05f5-4e5c-a46d-1ca6c99809f0 could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 961.437468] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388848, 'name': Rename_Task, 'duration_secs': 1.189348} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.437611] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 961.437848] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be0f9ef0-287c-48e9-a0e9-70c843130724 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.443746] env[61998]: INFO nova.compute.manager [-] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Took 1.27 seconds to deallocate network for instance. [ 961.444078] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 961.444078] env[61998]: value = "task-1388849" [ 961.444078] env[61998]: _type = "Task" [ 961.444078] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.455269] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388849, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.683255] env[61998]: INFO nova.compute.manager [-] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Took 1.46 seconds to deallocate network for instance. 
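The destroy sequence above follows the usual oslo.vmware task pattern: each vSphere call (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) returns a Task moref immediately, and wait_for_task polls it until vCenter reports success or error, which is where the "progress is 0%." and "completed successfully" records come from. A minimal sketch of that polling loop in plain Python; session.get_task_info and TaskFailed are illustrative stand-ins, not oslo.vmware internals:

import time

POLL_INTERVAL = 0.5  # seconds between polls; oslo.vmware drives this with a looping call

class TaskFailed(Exception):
    """Stand-in for the error raised when vCenter reports the task as failed."""

def wait_for_task(session, task_ref):
    """Block until the vCenter task completes, logging progress along the way."""
    start = time.monotonic()
    while True:
        info = session.get_task_info(task_ref)  # hypothetical helper: one PropertyCollector round-trip
        if info.state == 'running':
            print(f"Task: {task_ref} progress is {info.progress}%.")
        elif info.state == 'success':
            duration = time.monotonic() - start
            print(f"Task: {task_ref} completed successfully, duration_secs={duration:.6f}")
            return info.result
        elif info.state == 'error':
            raise TaskFailed(info.error)
        time.sleep(POLL_INTERVAL)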
[ 961.701451] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25507f1-531e-480c-8c76-80a98af95f1c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.709083] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ef4b32-3f28-472e-8098-70af2f6ccd32 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.740911] env[61998]: DEBUG nova.compute.manager [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 961.744159] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f070d793-fde8-480a-a1e0-34c52d7cff6d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.752251] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7759f1-a5b1-4f9b-92ae-69f5b52d696a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.767317] env[61998]: DEBUG nova.compute.provider_tree [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.770324] env[61998]: DEBUG nova.virt.hardware [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 961.770555] env[61998]: DEBUG nova.virt.hardware [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 961.770714] env[61998]: DEBUG nova.virt.hardware [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.770898] 
env[61998]: DEBUG nova.virt.hardware [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 961.771062] env[61998]: DEBUG nova.virt.hardware [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.771222] env[61998]: DEBUG nova.virt.hardware [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 961.771440] env[61998]: DEBUG nova.virt.hardware [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 961.771607] env[61998]: DEBUG nova.virt.hardware [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 961.771780] env[61998]: DEBUG nova.virt.hardware [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 961.771948] env[61998]: DEBUG nova.virt.hardware [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 961.772152] env[61998]: DEBUG nova.virt.hardware [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 961.772884] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b387ce02-6064-4aba-b594-8db62e5452b4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.776377] env[61998]: DEBUG nova.scheduler.client.report [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 961.784629] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890b4086-5a19-42ce-bc16-f7ea9961a749 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.952091] env[61998]: DEBUG oslo_concurrency.lockutils [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.957949] env[61998]: DEBUG oslo_vmware.api [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388849, 'name': PowerOnVM_Task, 'duration_secs': 0.443542} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.958246] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.958465] env[61998]: INFO nova.compute.manager [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Took 9.50 seconds to spawn the instance on the hypervisor. 
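The repeated "Inventory has not changed for provider" records reflect a cheap guard in the scheduler report client: the freshly computed inventory dict is compared against the last one sent to placement, and the HTTP update is skipped when they are equal. A rough sketch of that idea, with a stand-in helper rather than nova's actual set_inventory_for_provider:

def inventory_changed(cached, computed):
    """Return True if any resource class or field differs; nested dict
    equality compares total/reserved/min_unit/etc. field by field."""
    return cached != computed

# Shape taken from the log records above (VCPU entry shown; MEMORY_MB and
# DISK_GB follow the same layout).
cached = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
}
computed = dict(cached)  # fresh numbers from the resource tracker

if not inventory_changed(cached, computed):
    print("Inventory has not changed for provider; skipping placement update")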
[ 961.958658] env[61998]: DEBUG nova.compute.manager [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 961.959529] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6941bebd-31ff-4c38-a863-8c92438e7233 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.191733] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.283076] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.711s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.283076] env[61998]: DEBUG nova.compute.manager [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 962.284153] env[61998]: DEBUG oslo_concurrency.lockutils [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.332s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.284608] env[61998]: DEBUG nova.objects.instance [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lazy-loading 'resources' on Instance uuid c924f793-852e-4f45-85b1-b1e3fdc5d60d {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 962.347556] env[61998]: DEBUG nova.network.neutron [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Successfully updated port: 74a584ad-09cd-42a1-bd2b-3262ccdfb78d {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 962.477383] env[61998]: INFO nova.compute.manager [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Took 14.71 seconds to build instance. 
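The lockutils records throughout this section ("acquired ... :: waited 0.332s", '"released" ... :: held 1.683s') come from timing instrumentation wrapped around named locks. A simplified sketch of the same pattern using plain threading; oslo.concurrency layers named and optionally inter-process (file-backed) locks on top of this idea:

import contextlib
import threading
import time

_locks = {}  # name -> lock; a real registry would guard this dict itself

@contextlib.contextmanager
def timed_lock(name):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    with lock:
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" :: held {held:.3f}s')

# Usage mirroring the resource tracker's critical section:
with timed_lock("compute_resources"):
    pass  # e.g. claim or update_usage work happens here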
[ 962.737410] env[61998]: DEBUG nova.compute.manager [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Received event network-vif-plugged-74a584ad-09cd-42a1-bd2b-3262ccdfb78d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 962.737606] env[61998]: DEBUG oslo_concurrency.lockutils [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] Acquiring lock "4ab6f2f2-07c8-4477-a433-b6408cd919bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.737889] env[61998]: DEBUG oslo_concurrency.lockutils [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] Lock "4ab6f2f2-07c8-4477-a433-b6408cd919bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.738041] env[61998]: DEBUG oslo_concurrency.lockutils [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] Lock "4ab6f2f2-07c8-4477-a433-b6408cd919bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.738225] env[61998]: DEBUG nova.compute.manager [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] No waiting events found dispatching network-vif-plugged-74a584ad-09cd-42a1-bd2b-3262ccdfb78d {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 962.738394] env[61998]: WARNING nova.compute.manager [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Received unexpected event network-vif-plugged-74a584ad-09cd-42a1-bd2b-3262ccdfb78d for instance with vm_state building and task_state spawning. [ 962.738559] env[61998]: DEBUG nova.compute.manager [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Received event network-changed-74a584ad-09cd-42a1-bd2b-3262ccdfb78d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 962.738715] env[61998]: DEBUG nova.compute.manager [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Refreshing instance network info cache due to event network-changed-74a584ad-09cd-42a1-bd2b-3262ccdfb78d. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 962.738899] env[61998]: DEBUG oslo_concurrency.lockutils [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] Acquiring lock "refresh_cache-4ab6f2f2-07c8-4477-a433-b6408cd919bc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.739052] env[61998]: DEBUG oslo_concurrency.lockutils [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] Acquired lock "refresh_cache-4ab6f2f2-07c8-4477-a433-b6408cd919bc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.739222] env[61998]: DEBUG nova.network.neutron [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Refreshing network info cache for port 74a584ad-09cd-42a1-bd2b-3262ccdfb78d {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 962.787569] env[61998]: DEBUG nova.compute.utils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 962.792262] env[61998]: DEBUG nova.compute.manager [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 962.792262] env[61998]: DEBUG nova.network.neutron [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 962.835030] env[61998]: DEBUG nova.policy [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f155bbfca47547c2bf745811003ffcec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f49104f21d7147328bcc8edee8d3cdb2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 962.850266] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Acquiring lock "refresh_cache-4ab6f2f2-07c8-4477-a433-b6408cd919bc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.900147] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fbb009e-7226-4ef9-8007-3c06cdfb7103 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
962.907397] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faad701c-9969-4e8d-96a4-9d4d8df7eeeb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.937103] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1875afb7-9433-4e0c-ba96-f51cfb146ad5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.944269] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782cbac5-5d33-4466-b5bb-6fed5ff7bdd7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.957302] env[61998]: DEBUG oslo_concurrency.lockutils [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.957837] env[61998]: DEBUG nova.compute.provider_tree [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.979529] env[61998]: DEBUG oslo_concurrency.lockutils [None req-248ecf58-99a5-44a4-90fc-200386393cd9 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.222s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.979815] env[61998]: DEBUG oslo_concurrency.lockutils [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.023s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.980108] env[61998]: DEBUG oslo_concurrency.lockutils [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.980424] env[61998]: DEBUG oslo_concurrency.lockutils [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.980670] env[61998]: DEBUG oslo_concurrency.lockutils [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d 
tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.982859] env[61998]: INFO nova.compute.manager [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Terminating instance [ 962.984882] env[61998]: DEBUG nova.compute.manager [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 962.985121] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.986065] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ea5bb2-67bb-405d-b9e4-b24e10a0b11f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.994565] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.994810] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b0d062d-4a1a-4e8e-939d-8f6b2a8794fb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.001198] env[61998]: DEBUG oslo_vmware.api [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 963.001198] env[61998]: value = "task-1388850" [ 963.001198] env[61998]: _type = "Task" [ 963.001198] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.011046] env[61998]: DEBUG oslo_vmware.api [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388850, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.086394] env[61998]: DEBUG nova.network.neutron [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Successfully created port: 755f7df1-f8ab-44a0-92fd-7ffcdc053632 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 963.292395] env[61998]: DEBUG nova.compute.manager [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 963.304374] env[61998]: DEBUG nova.network.neutron [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 963.439051] env[61998]: DEBUG nova.network.neutron [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.462653] env[61998]: DEBUG nova.scheduler.client.report [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 963.511332] env[61998]: DEBUG oslo_vmware.api [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388850, 'name': PowerOffVM_Task, 'duration_secs': 0.18325} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.511630] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 963.511776] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 963.512036] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-784ea014-50a4-4d38-bb63-89977b6c7f6b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.571549] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 963.571741] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 963.571930] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleting the datastore file [datastore2] 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.572214] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44d2970f-8793-4040-8f2b-ffb51911a561 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.578875] env[61998]: DEBUG oslo_vmware.api [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 963.578875] env[61998]: value = "task-1388852" [ 963.578875] env[61998]: _type = "Task" [ 963.578875] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.585889] env[61998]: DEBUG oslo_vmware.api [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388852, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.944739] env[61998]: DEBUG oslo_concurrency.lockutils [req-2eec980a-07cb-4527-b3af-a4ed37046724 req-41728ff4-fbd2-474b-97f6-cb2175e398db service nova] Releasing lock "refresh_cache-4ab6f2f2-07c8-4477-a433-b6408cd919bc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.945144] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Acquired lock "refresh_cache-4ab6f2f2-07c8-4477-a433-b6408cd919bc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.945311] env[61998]: DEBUG nova.network.neutron [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 963.967127] env[61998]: DEBUG oslo_concurrency.lockutils [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.683s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.970323] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.778s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.970323] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.991079] env[61998]: INFO nova.scheduler.client.report [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleted allocations for instance c924f793-852e-4f45-85b1-b1e3fdc5d60d [ 963.992970] env[61998]: INFO nova.scheduler.client.report [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted allocations for instance 43ff4071-05f5-4e5c-a46d-1ca6c99809f0 [ 964.088807] env[61998]: DEBUG oslo_vmware.api [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388852, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160143} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
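
The PowerOffVM_Task and DeleteDatastoreFile_Task records above all follow one oslo.vmware pattern: invoke a *_Task SOAP method through the authenticated session, then block in wait_for_task(), whose poll loop produces the "progress is N%" lines (api.py:434) and the "completed successfully" lines (api.py:444). A minimal sketch of that pattern; the endpoint name matches this run, but the credentials, retry count, and poll interval below are placeholders:

    from oslo_vmware import api

    def power_off(session, vm_ref):
        # invoke_api posts the *_Task method; wait_for_task drives the
        # polling loop seen in the log and returns the final task info.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)

    # Logs in to vCenter at construction time (placeholder credentials).
    session = api.VMwareAPISession(
        'vc1.osci.c.eu-de-1.cloud.sap', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
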
[ 964.089083] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 964.089294] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 964.089490] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 964.089668] env[61998]: INFO nova.compute.manager [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Took 1.10 seconds to destroy the instance on the hypervisor. [ 964.089912] env[61998]: DEBUG oslo.service.loopingcall [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 964.090431] env[61998]: DEBUG nova.compute.manager [-] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 964.090431] env[61998]: DEBUG nova.network.neutron [-] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 964.302831] env[61998]: DEBUG nova.compute.manager [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}}
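
The oslo.service.loopingcall record above ("Waiting for function ... _deallocate_network_with_retries to return.", loopingcall.py:435) is the wrapper that loopingcall.RetryDecorator puts around a function: nova wraps its Neutron deallocation so transient failures are retried with an increasing sleep. A runnable sketch of the same decorator; the retry limits and the exception type below are illustrative, not nova's exact settings:

    import itertools
    from oslo_service import loopingcall

    attempts = itertools.count(1)

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=12,
                                exceptions=(ConnectionError,))
    def deallocate_network_with_retries():
        # Stand-in for the real Neutron teardown: fail twice, then succeed.
        if next(attempts) < 3:
            raise ConnectionError('neutron unreachable')
        return 'deallocated'

    print(deallocate_network_with_retries())  # emits the same DEBUG line
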
[ 964.327797] env[61998]: DEBUG nova.virt.hardware [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 964.328066] env[61998]: DEBUG nova.virt.hardware [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 964.328272] env[61998]: DEBUG nova.virt.hardware [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 964.328494] env[61998]: DEBUG nova.virt.hardware [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 964.328654] env[61998]: DEBUG nova.virt.hardware [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 964.328807] env[61998]: DEBUG nova.virt.hardware [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 964.329035] env[61998]: DEBUG nova.virt.hardware [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 964.329211] env[61998]: DEBUG nova.virt.hardware [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 964.329386] env[61998]: DEBUG nova.virt.hardware [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 964.329554] env[61998]: DEBUG nova.virt.hardware [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 964.329730] env[61998]: DEBUG nova.virt.hardware [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 964.330817] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa960fb7-ba62-4d55-89ed-ce0f3b5bb70b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.339446] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b66b3d-b0cf-458d-a616-ca2953196b5e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.502454] env[61998]: DEBUG oslo_concurrency.lockutils [None req-32124559-ab00-4f34-ba0a-21c865ead7a7 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "c924f793-852e-4f45-85b1-b1e3fdc5d60d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 4.934s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.503437] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2c1ae20-31bd-4880-a54b-3fc4f08f5aa9 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "43ff4071-05f5-4e5c-a46d-1ca6c99809f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.415s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.507225] env[61998]: DEBUG nova.network.neutron [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
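
The nova.virt.hardware walk above (ending in "Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") reduces, with no flavor or image limits set, to enumerating the factorizations of the vCPU count into sockets x cores x threads under a 65536 cap per dimension; for 1 vCPU the only candidate is 1:1:1. A simplified, illustrative re-creation of that enumeration (not nova's actual implementation, which also applies preference ordering):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield every (sockets, cores, threads) triple whose product is
        # exactly vcpus and which respects the per-dimension maxima.
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    yield (sockets, cores, threads)

    print(list(possible_topologies(1)))   # [(1, 1, 1)] -- matches this log
    print(list(possible_topologies(4)))   # six candidate factorizations
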
[ 964.741098] env[61998]: DEBUG nova.network.neutron [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Updating instance_info_cache with network_info: [{"id": "74a584ad-09cd-42a1-bd2b-3262ccdfb78d", "address": "fa:16:3e:8d:bf:cf", "network": {"id": "d6005aef-7d09-4175-a60f-f104c2369efd", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1871055484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b03f93634e114614937ca5c07c961174", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74a584ad-09", "ovs_interfaceid": "74a584ad-09cd-42a1-bd2b-3262ccdfb78d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.745942] env[61998]: DEBUG nova.compute.manager [req-895dc346-f028-49ec-89eb-496d74b3b649 req-693f7a33-d910-4976-a695-ff0202e58e75 service nova] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Received event network-vif-plugged-755f7df1-f8ab-44a0-92fd-7ffcdc053632 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 964.745942] env[61998]: DEBUG oslo_concurrency.lockutils [req-895dc346-f028-49ec-89eb-496d74b3b649 req-693f7a33-d910-4976-a695-ff0202e58e75 service nova] Acquiring lock "5789b2bc-a8c5-4986-bb53-7175cd566142-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.745942] env[61998]: DEBUG oslo_concurrency.lockutils [req-895dc346-f028-49ec-89eb-496d74b3b649 req-693f7a33-d910-4976-a695-ff0202e58e75 service nova] Lock "5789b2bc-a8c5-4986-bb53-7175cd566142-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.745942] env[61998]: DEBUG oslo_concurrency.lockutils [req-895dc346-f028-49ec-89eb-496d74b3b649 req-693f7a33-d910-4976-a695-ff0202e58e75 service nova] Lock "5789b2bc-a8c5-4986-bb53-7175cd566142-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.745942] env[61998]: DEBUG nova.compute.manager [req-895dc346-f028-49ec-89eb-496d74b3b649 req-693f7a33-d910-4976-a695-ff0202e58e75 service nova] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] No waiting events found dispatching network-vif-plugged-755f7df1-f8ab-44a0-92fd-7ffcdc053632 {{(pid=61998)
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 964.745942] env[61998]: WARNING nova.compute.manager [req-895dc346-f028-49ec-89eb-496d74b3b649 req-693f7a33-d910-4976-a695-ff0202e58e75 service nova] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Received unexpected event network-vif-plugged-755f7df1-f8ab-44a0-92fd-7ffcdc053632 for instance with vm_state building and task_state spawning. [ 964.769301] env[61998]: DEBUG nova.compute.manager [req-5c132e35-be45-44f0-85d5-c5d9438b561a req-280f3d8b-ceaf-4e5b-81bf-c02a055f9ee9 service nova] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Received event network-vif-deleted-39cacd52-c03f-4a41-b9bf-6afdc250e017 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 964.769539] env[61998]: INFO nova.compute.manager [req-5c132e35-be45-44f0-85d5-c5d9438b561a req-280f3d8b-ceaf-4e5b-81bf-c02a055f9ee9 service nova] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Neutron deleted interface 39cacd52-c03f-4a41-b9bf-6afdc250e017; detaching it from the instance and deleting it from the info cache [ 964.769767] env[61998]: DEBUG nova.network.neutron [req-5c132e35-be45-44f0-85d5-c5d9438b561a req-280f3d8b-ceaf-4e5b-81bf-c02a055f9ee9 service nova] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.807960] env[61998]: DEBUG nova.network.neutron [-] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.243062] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Releasing lock "refresh_cache-4ab6f2f2-07c8-4477-a433-b6408cd919bc" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.243352] env[61998]: DEBUG nova.compute.manager [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Instance network_info: |[{"id": "74a584ad-09cd-42a1-bd2b-3262ccdfb78d", "address": "fa:16:3e:8d:bf:cf", "network": {"id": "d6005aef-7d09-4175-a60f-f104c2369efd", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1871055484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b03f93634e114614937ca5c07c961174", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74a584ad-09", "ovs_interfaceid": "74a584ad-09cd-42a1-bd2b-3262ccdfb78d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 965.243800] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:bf:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c405e9f-a6c8-4308-acac-071654efe18e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '74a584ad-09cd-42a1-bd2b-3262ccdfb78d', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.252058] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Creating folder: Project (b03f93634e114614937ca5c07c961174). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 965.252340] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc1e600a-ca35-4154-8316-06d809aa382a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.263230] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Created folder: Project (b03f93634e114614937ca5c07c961174) in parent group-v294665. [ 965.263408] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Creating folder: Instances. Parent ref: group-v294775. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 965.263621] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa9a6678-4b5c-4683-8fa6-01ed0318aa61 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.273577] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-08c6bc71-daf7-4d68-8727-791027021eb8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.275739] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Created folder: Instances in parent group-v294775. [ 965.275994] env[61998]: DEBUG oslo.service.loopingcall [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.276245] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 965.276755] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ab7dff6-ad92-41ec-870b-6e3d283a2376 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.293772] env[61998]: DEBUG nova.network.neutron [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Successfully updated port: 755f7df1-f8ab-44a0-92fd-7ffcdc053632 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 965.298968] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d04690d-c702-46d7-8d2f-795dda59a194 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.311699] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.311908] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.312158] env[61998]: DEBUG nova.network.neutron [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 965.313250] env[61998]: INFO nova.compute.manager [-] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Took 1.22 seconds to deallocate network for instance. [ 965.313527] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.313527] env[61998]: value = "task-1388855" [ 965.313527] env[61998]: _type = "Task" [ 965.313527] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.328942] env[61998]: DEBUG nova.compute.manager [req-5c132e35-be45-44f0-85d5-c5d9438b561a req-280f3d8b-ceaf-4e5b-81bf-c02a055f9ee9 service nova] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Detach interface failed, port_id=39cacd52-c03f-4a41-b9bf-6afdc250e017, reason: Instance 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1 could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 965.332413] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388855, 'name': CreateVM_Task} progress is 10%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.498301] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.498551] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.832956] env[61998]: DEBUG oslo_concurrency.lockutils [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.833537] env[61998]: DEBUG oslo_concurrency.lockutils [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.833682] env[61998]: DEBUG nova.objects.instance [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lazy-loading 'resources' on Instance uuid 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.848295] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388855, 'name': CreateVM_Task, 'duration_secs': 0.302998} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
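
The lock records in this run come in two oslo.concurrency shapes: the synchronized decorator, whose inner() wrapper logs "Acquiring/acquired/released ... by <function>" with waited/held timings (lockutils.py:402/407/421), and the plain lock() context manager behind the Acquiring/Acquired/Releasing lines around the refresh_cache locks (lockutils.py:310/313/331). A minimal sketch of both, reusing names from this log:

    import time
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Serialized against every other holder of 'compute_resources';
        # the decorator logs how long the lock was waited for and held.
        time.sleep(0.1)

    def refresh_cache(instance_uuid):
        # Context-manager form: plain Acquiring/Acquired/Releasing records.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache here

    update_usage()
    refresh_cache('101d9d29-24b4-4c4d-bf7a-70abfd200be9')
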
[ 965.849967] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.850669] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.850852] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.851206] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 965.853035] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ac058ae-edce-4811-975b-5ea8bc70b6a0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.864178] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Waiting for the task: (returnval){ [ 965.864178] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52aa27b7-cd9a-7461-2690-6033f97a5d8d" [ 965.864178] env[61998]: _type = "Task" [ 965.864178] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.865416] env[61998]: DEBUG nova.network.neutron [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 965.881426] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52aa27b7-cd9a-7461-2690-6033f97a5d8d, 'name': SearchDatastore_Task, 'duration_secs': 0.009652} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.884598] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.884873] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 965.885147] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.885302] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.886526] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 965.886787] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbbddcfc-ed5d-4d3e-b57f-dd370e82833d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.902498] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 965.903098] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 965.903821] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-890b50b0-1361-4a9c-bb1e-4749445c3635 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.910180] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Waiting for the task: (returnval){ [ 965.910180] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52f6812a-f59c-5948-cbc7-9464b3d851b8" [ 965.910180] env[61998]: _type = "Task" [ 965.910180] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.918219] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52f6812a-f59c-5948-cbc7-9464b3d851b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.001489] env[61998]: DEBUG nova.compute.manager [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 966.050244] env[61998]: DEBUG nova.network.neutron [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance_info_cache with network_info: [{"id": "755f7df1-f8ab-44a0-92fd-7ffcdc053632", "address": "fa:16:3e:c8:1f:fe", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755f7df1-f8", "ovs_interfaceid": "755f7df1-f8ab-44a0-92fd-7ffcdc053632", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.420864] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': 
session[523a9ed6-b255-d82a-34e3-504b542807f6]52f6812a-f59c-5948-cbc7-9464b3d851b8, 'name': SearchDatastore_Task, 'duration_secs': 0.008976} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.423703] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb4b4555-8b53-4d2f-99fd-c33ca840fea4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.428702] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Waiting for the task: (returnval){ [ 966.428702] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52b17ad1-0d8d-aeb1-5600-40b39afd7051" [ 966.428702] env[61998]: _type = "Task" [ 966.428702] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.432675] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c52fc05-8250-4b4e-826d-d171bf06e1b7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.440331] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b17ad1-0d8d-aeb1-5600-40b39afd7051, 'name': SearchDatastore_Task, 'duration_secs': 0.009027} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.441803] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.442030] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 4ab6f2f2-07c8-4477-a433-b6408cd919bc/4ab6f2f2-07c8-4477-a433-b6408cd919bc.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 966.442315] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3a66e92-741c-4a7c-b209-a0de8a92e2ac {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.444657] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20d6727-91f7-4819-bbba-d48d1b95fa33 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.474669] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e337b25e-0872-48f4-aa48-3b7da87abac6 {{(pid=61998) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.476997] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Waiting for the task: (returnval){ [ 966.476997] env[61998]: value = "task-1388856" [ 966.476997] env[61998]: _type = "Task" [ 966.476997] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.482862] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6bf8eda-2e0f-4393-a1a0-406adf3fa741 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.490610] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388856, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.499581] env[61998]: DEBUG nova.compute.provider_tree [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.519704] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.552439] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.552749] env[61998]: DEBUG nova.compute.manager [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Instance network_info: |[{"id": "755f7df1-f8ab-44a0-92fd-7ffcdc053632", "address": "fa:16:3e:c8:1f:fe", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap755f7df1-f8", "ovs_interfaceid": "755f7df1-f8ab-44a0-92fd-7ffcdc053632", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 966.553176] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:1f:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '755f7df1-f8ab-44a0-92fd-7ffcdc053632', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 966.560963] env[61998]: DEBUG oslo.service.loopingcall [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.561154] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 966.561386] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6cbecd98-bf72-4a56-9406-acbb497fb007 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.580111] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 966.580111] env[61998]: value = "task-1388857" [ 966.580111] env[61998]: _type = "Task" [ 966.580111] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.587983] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388857, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.799992] env[61998]: DEBUG nova.compute.manager [req-1dbebffc-2829-4457-9e43-d6d2d1ebea37 req-860ce492-d631-4dd3-a958-d31ffe703805 service nova] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Received event network-changed-755f7df1-f8ab-44a0-92fd-7ffcdc053632 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 966.800373] env[61998]: DEBUG nova.compute.manager [req-1dbebffc-2829-4457-9e43-d6d2d1ebea37 req-860ce492-d631-4dd3-a958-d31ffe703805 service nova] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Refreshing instance network info cache due to event network-changed-755f7df1-f8ab-44a0-92fd-7ffcdc053632. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 966.800470] env[61998]: DEBUG oslo_concurrency.lockutils [req-1dbebffc-2829-4457-9e43-d6d2d1ebea37 req-860ce492-d631-4dd3-a958-d31ffe703805 service nova] Acquiring lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.800577] env[61998]: DEBUG oslo_concurrency.lockutils [req-1dbebffc-2829-4457-9e43-d6d2d1ebea37 req-860ce492-d631-4dd3-a958-d31ffe703805 service nova] Acquired lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.800744] env[61998]: DEBUG nova.network.neutron [req-1dbebffc-2829-4457-9e43-d6d2d1ebea37 req-860ce492-d631-4dd3-a958-d31ffe703805 service nova] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Refreshing network info cache for port 755f7df1-f8ab-44a0-92fd-7ffcdc053632 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 966.988422] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388856, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456183} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.988800] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 4ab6f2f2-07c8-4477-a433-b6408cd919bc/4ab6f2f2-07c8-4477-a433-b6408cd919bc.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 966.988995] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 966.989298] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bdf6cfd1-1ba0-4d2d-bc68-ac7194473c31 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.995852] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Waiting for the task: (returnval){ [ 966.995852] env[61998]: value = "task-1388858" [ 966.995852] env[61998]: _type = "Task" [ 966.995852] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.004555] env[61998]: DEBUG nova.scheduler.client.report [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 967.007783] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388858, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.090731] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388857, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.484648] env[61998]: DEBUG nova.network.neutron [req-1dbebffc-2829-4457-9e43-d6d2d1ebea37 req-860ce492-d631-4dd3-a958-d31ffe703805 service nova] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updated VIF entry in instance network info cache for port 755f7df1-f8ab-44a0-92fd-7ffcdc053632. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 967.485032] env[61998]: DEBUG nova.network.neutron [req-1dbebffc-2829-4457-9e43-d6d2d1ebea37 req-860ce492-d631-4dd3-a958-d31ffe703805 service nova] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance_info_cache with network_info: [{"id": "755f7df1-f8ab-44a0-92fd-7ffcdc053632", "address": "fa:16:3e:c8:1f:fe", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755f7df1-f8", "ovs_interfaceid": "755f7df1-f8ab-44a0-92fd-7ffcdc053632", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.506021] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': 
task-1388858, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067585} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.506315] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 967.507095] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d97854-2fcd-4220-bd22-4101c5ed4587 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.509930] env[61998]: DEBUG oslo_concurrency.lockutils [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.677s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.511951] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.992s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.513671] env[61998]: INFO nova.compute.claims [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 967.535646] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 4ab6f2f2-07c8-4477-a433-b6408cd919bc/4ab6f2f2-07c8-4477-a433-b6408cd919bc.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 967.536673] env[61998]: INFO nova.scheduler.client.report [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted allocations for instance 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1 [ 967.537824] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2eba344f-472e-4732-9524-076b9c793ad4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.561083] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Waiting for the task: (returnval){ [ 967.561083] env[61998]: value = "task-1388859" [ 967.561083] env[61998]: _type = "Task" [ 967.561083] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.569893] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388859, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.592055] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388857, 'name': CreateVM_Task, 'duration_secs': 0.53317} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.592754] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 967.593442] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.593610] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.593935] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 967.594421] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cca89f7-fffa-411f-a509-60f0aed1b040 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.598877] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 967.598877] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]520a6dd0-d933-9218-6d8f-5c9630cd0821" [ 967.598877] env[61998]: _type = "Task" [ 967.598877] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.606463] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]520a6dd0-d933-9218-6d8f-5c9630cd0821, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.988146] env[61998]: DEBUG oslo_concurrency.lockutils [req-1dbebffc-2829-4457-9e43-d6d2d1ebea37 req-860ce492-d631-4dd3-a958-d31ffe703805 service nova] Releasing lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.059182] env[61998]: DEBUG oslo_concurrency.lockutils [None req-14a871e1-fb9f-48e8-9b95-5c9d3809e99d tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.079s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.072416] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388859, 'name': ReconfigVM_Task, 'duration_secs': 0.256602} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.073192] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 4ab6f2f2-07c8-4477-a433-b6408cd919bc/4ab6f2f2-07c8-4477-a433-b6408cd919bc.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 968.073794] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c9a10efd-2806-4042-9a6b-df65f9fb1a75 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.080486] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Waiting for the task: (returnval){ [ 968.080486] env[61998]: value = "task-1388860" [ 968.080486] env[61998]: _type = "Task" [ 968.080486] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.088935] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388860, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.108163] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]520a6dd0-d933-9218-6d8f-5c9630cd0821, 'name': SearchDatastore_Task, 'duration_secs': 0.008145} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.108480] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.108712] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 968.108964] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.109159] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.109360] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 968.109876] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6a64c2d-4ffc-4db4-9d5b-0b8448da92a9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.116443] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 968.116613] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 968.117317] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f292ff42-4954-4832-9dd6-9eb08c29ff92 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.122070] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 968.122070] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52c22304-0f34-69f7-4295-e028b89fd5d2" [ 968.122070] env[61998]: _type = "Task" [ 968.122070] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.129250] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c22304-0f34-69f7-4295-e028b89fd5d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.419275] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "a67aa33f-c7ba-44da-bdfa-e0a53a8538ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.419587] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a67aa33f-c7ba-44da-bdfa-e0a53a8538ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.419812] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "a67aa33f-c7ba-44da-bdfa-e0a53a8538ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.420011] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a67aa33f-c7ba-44da-bdfa-e0a53a8538ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.420203] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a67aa33f-c7ba-44da-bdfa-e0a53a8538ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.422374] env[61998]: INFO nova.compute.manager [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Terminating instance [ 968.424091] env[61998]: DEBUG nova.compute.manager [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 968.424289] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.425150] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0db1434-44ac-4b80-89b2-1ab0fdf117fd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.432848] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.433085] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c69d665e-34e9-48e5-b0b9-71c9d3986ab9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.439303] env[61998]: DEBUG oslo_vmware.api [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 968.439303] env[61998]: value = "task-1388861" [ 968.439303] env[61998]: _type = "Task" [ 968.439303] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.446470] env[61998]: DEBUG oslo_vmware.api [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388861, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.589588] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388860, 'name': Rename_Task} progress is 99%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.600052] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac448e9a-20f5-4753-bef8-58cd03fde98e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.606784] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70afc2f8-1d86-4492-aa9d-7c66c23624a3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.639511] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f0d3f9-6e49-4b38-96e8-55b3fe32e4ac {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.649722] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02c67dc-8e26-4698-937a-74841bb45357 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.653263] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c22304-0f34-69f7-4295-e028b89fd5d2, 'name': SearchDatastore_Task, 'duration_secs': 0.006978} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.654248] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29d4bb82-3cd1-4a36-9038-02034791a1c9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.663445] env[61998]: DEBUG nova.compute.provider_tree [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.667759] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 968.667759] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]521695c1-8cac-92ea-42b2-bdc9c5b65c5b" [ 968.667759] env[61998]: _type = "Task" [ 968.667759] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.674722] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]521695c1-8cac-92ea-42b2-bdc9c5b65c5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.950706] env[61998]: DEBUG oslo_vmware.api [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388861, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.090896] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388860, 'name': Rename_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.166636] env[61998]: DEBUG nova.scheduler.client.report [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 969.178956] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]521695c1-8cac-92ea-42b2-bdc9c5b65c5b, 'name': SearchDatastore_Task, 'duration_secs': 0.00924} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.179266] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.179913] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 5789b2bc-a8c5-4986-bb53-7175cd566142/5789b2bc-a8c5-4986-bb53-7175cd566142.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 969.179913] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0703fe1d-8989-4332-86d5-e451c48c7a4d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.188845] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 969.188845] env[61998]: value = "task-1388862" [ 969.188845] env[61998]: _type = "Task" [ 969.188845] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.196630] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388862, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.453103] env[61998]: DEBUG oslo_vmware.api [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388861, 'name': PowerOffVM_Task, 'duration_secs': 0.926505} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.453568] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 969.453717] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.454025] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4760a4e-b2e6-4c51-8bec-45367a21dadf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.544681] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.544980] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.545192] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleting the datastore file [datastore1] a67aa33f-c7ba-44da-bdfa-e0a53a8538ad {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.545483] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6da2c4ce-dbfc-4f56-879c-30a5e0e4dd4f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.552159] env[61998]: DEBUG oslo_vmware.api [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 969.552159] env[61998]: value = "task-1388864" [ 969.552159] env[61998]: _type = "Task" [ 969.552159] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.561236] env[61998]: DEBUG oslo_vmware.api [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388864, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.593511] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388860, 'name': Rename_Task, 'duration_secs': 1.135006} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.593775] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 969.593936] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4a96c27-a2f9-4aac-beb1-7a0821aad93a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.601364] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Waiting for the task: (returnval){ [ 969.601364] env[61998]: value = "task-1388865" [ 969.601364] env[61998]: _type = "Task" [ 969.601364] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.610043] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388865, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.674259] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.162s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.674751] env[61998]: DEBUG nova.compute.manager [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 969.702049] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388862, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502641} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.702442] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 5789b2bc-a8c5-4986-bb53-7175cd566142/5789b2bc-a8c5-4986-bb53-7175cd566142.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 969.702792] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 969.703165] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4a9dd1e-75a7-4ddd-ac53-ac80b9bfa0d4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.710908] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 969.710908] env[61998]: value = "task-1388866" [ 969.710908] env[61998]: _type = "Task" [ 969.710908] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.724078] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388866, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.063819] env[61998]: DEBUG oslo_vmware.api [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310228} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.064117] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 970.064312] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 970.064493] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 970.064670] env[61998]: INFO nova.compute.manager [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Took 1.64 seconds to destroy the instance on the hypervisor. [ 970.064949] env[61998]: DEBUG oslo.service.loopingcall [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 970.065295] env[61998]: DEBUG nova.compute.manager [-] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 970.065440] env[61998]: DEBUG nova.network.neutron [-] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 970.112401] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388865, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.180329] env[61998]: DEBUG nova.compute.utils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 970.181790] env[61998]: DEBUG nova.compute.manager [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 970.181977] env[61998]: DEBUG nova.network.neutron [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 970.220892] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072393} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.221274] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 970.222021] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99f1004-fb22-40ed-8688-0d2c2b4c5dbc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.248251] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 5789b2bc-a8c5-4986-bb53-7175cd566142/5789b2bc-a8c5-4986-bb53-7175cd566142.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 970.248588] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad1b9f07-ef7f-4afc-9eb6-8b383954f183 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.268604] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 970.268604] env[61998]: value = "task-1388867" [ 970.268604] env[61998]: _type = "Task" [ 970.268604] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.276949] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388867, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.286821] env[61998]: DEBUG nova.policy [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8b17f109d724201a22264aa6ee02ca1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82b8854f80cf48628167fd6f678d7dd7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 970.613357] env[61998]: DEBUG oslo_vmware.api [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388865, 'name': PowerOnVM_Task, 'duration_secs': 0.530447} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.613357] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 970.613503] env[61998]: INFO nova.compute.manager [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Took 8.87 seconds to spawn the instance on the hypervisor. 
[ 970.613615] env[61998]: DEBUG nova.compute.manager [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 970.614408] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d97fcd-5b98-46c5-a543-2b34425c95c3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.642254] env[61998]: DEBUG nova.compute.manager [req-08a6b699-5333-4af4-b1c9-00d409f4e6f5 req-5c71937c-cac0-469f-92b9-ebdbffe8393f service nova] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Received event network-vif-deleted-67af4e0e-6ff7-417c-8f5f-9783e9786ff3 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 970.642470] env[61998]: INFO nova.compute.manager [req-08a6b699-5333-4af4-b1c9-00d409f4e6f5 req-5c71937c-cac0-469f-92b9-ebdbffe8393f service nova] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Neutron deleted interface 67af4e0e-6ff7-417c-8f5f-9783e9786ff3; detaching it from the instance and deleting it from the info cache [ 970.642645] env[61998]: DEBUG nova.network.neutron [req-08a6b699-5333-4af4-b1c9-00d409f4e6f5 req-5c71937c-cac0-469f-92b9-ebdbffe8393f service nova] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.685652] env[61998]: DEBUG nova.compute.manager [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 970.779524] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388867, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.836316] env[61998]: DEBUG nova.network.neutron [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Successfully created port: e6c7c097-27f3-40b7-b085-a8b5f170210d {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 971.125015] env[61998]: DEBUG nova.network.neutron [-] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.135236] env[61998]: INFO nova.compute.manager [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Took 15.38 seconds to build instance. 
[ 971.152589] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2a4825b-ad21-4562-b357-56884b466ce7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.162077] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03ee8ed-1ab8-4e4b-ba64-60194786451f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.188673] env[61998]: DEBUG nova.compute.manager [req-08a6b699-5333-4af4-b1c9-00d409f4e6f5 req-5c71937c-cac0-469f-92b9-ebdbffe8393f service nova] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Detach interface failed, port_id=67af4e0e-6ff7-417c-8f5f-9783e9786ff3, reason: Instance a67aa33f-c7ba-44da-bdfa-e0a53a8538ad could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 971.279786] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388867, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.627566] env[61998]: INFO nova.compute.manager [-] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Took 1.56 seconds to deallocate network for instance. [ 971.636636] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d635af2b-3c6b-49be-aa36-2a7565c71b35 tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Lock "4ab6f2f2-07c8-4477-a433-b6408cd919bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.889s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.694489] env[61998]: DEBUG nova.compute.manager [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 971.720460] env[61998]: DEBUG nova.virt.hardware [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 971.720460] env[61998]: DEBUG nova.virt.hardware [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 971.720460] env[61998]: DEBUG nova.virt.hardware [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.720681] env[61998]: DEBUG nova.virt.hardware [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 971.720773] env[61998]: DEBUG nova.virt.hardware [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.720892] env[61998]: DEBUG nova.virt.hardware [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 971.721115] env[61998]: DEBUG nova.virt.hardware [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 971.721286] env[61998]: DEBUG nova.virt.hardware [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 971.721459] env[61998]: DEBUG nova.virt.hardware [None 
req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 971.721624] env[61998]: DEBUG nova.virt.hardware [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 971.721799] env[61998]: DEBUG nova.virt.hardware [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 971.722743] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a18ebc-0924-4624-9263-517ad88485f4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.731389] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ca6f2b-6344-46e4-b1d1-b6342d019c7e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.779031] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388867, 'name': ReconfigVM_Task, 'duration_secs': 1.195261} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.779150] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 5789b2bc-a8c5-4986-bb53-7175cd566142/5789b2bc-a8c5-4986-bb53-7175cd566142.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 971.779701] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fca72bf2-7921-4818-bdd5-964bbd1f24dd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.785964] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 971.785964] env[61998]: value = "task-1388868" [ 971.785964] env[61998]: _type = "Task" [ 971.785964] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.793296] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388868, 'name': Rename_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.134353] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.134632] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.134857] env[61998]: DEBUG nova.objects.instance [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lazy-loading 'resources' on Instance uuid a67aa33f-c7ba-44da-bdfa-e0a53a8538ad {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.211585] env[61998]: DEBUG nova.compute.manager [req-b2c3eb56-cda9-4d6e-a3b5-f40e1433cd57 req-c60f4f73-5ccf-4426-b699-25c61ae11de9 service nova] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Received event network-vif-plugged-e6c7c097-27f3-40b7-b085-a8b5f170210d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 972.211839] env[61998]: DEBUG oslo_concurrency.lockutils [req-b2c3eb56-cda9-4d6e-a3b5-f40e1433cd57 req-c60f4f73-5ccf-4426-b699-25c61ae11de9 service nova] Acquiring lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.212477] env[61998]: DEBUG oslo_concurrency.lockutils [req-b2c3eb56-cda9-4d6e-a3b5-f40e1433cd57 req-c60f4f73-5ccf-4426-b699-25c61ae11de9 service nova] Lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.212673] env[61998]: DEBUG oslo_concurrency.lockutils [req-b2c3eb56-cda9-4d6e-a3b5-f40e1433cd57 req-c60f4f73-5ccf-4426-b699-25c61ae11de9 service nova] Lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.212843] env[61998]: DEBUG nova.compute.manager [req-b2c3eb56-cda9-4d6e-a3b5-f40e1433cd57 req-c60f4f73-5ccf-4426-b699-25c61ae11de9 service nova] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] No waiting events found dispatching network-vif-plugged-e6c7c097-27f3-40b7-b085-a8b5f170210d {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 972.213028] env[61998]: WARNING nova.compute.manager [req-b2c3eb56-cda9-4d6e-a3b5-f40e1433cd57 req-c60f4f73-5ccf-4426-b699-25c61ae11de9 service nova] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Received unexpected event network-vif-plugged-e6c7c097-27f3-40b7-b085-a8b5f170210d for instance with vm_state building and task_state spawning. 
[ 972.296628] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388868, 'name': Rename_Task, 'duration_secs': 0.150734} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.297379] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 972.297817] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3f38e3b-cee6-4a25-9ff0-b9ccecd40fec {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.304728] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 972.304728] env[61998]: value = "task-1388869" [ 972.304728] env[61998]: _type = "Task" [ 972.304728] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.314472] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388869, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.724641] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475618c9-ea1e-4618-b5ec-aeaafd8350b8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.732704] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8088a480-2473-4f47-b2c8-798b340184a0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.764136] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc54e8d-2734-40d7-b0b9-073eb59010f3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.775450] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fb5b0e-68e4-4555-8af6-17943c4786b0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.780250] env[61998]: DEBUG nova.network.neutron [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Successfully updated port: e6c7c097-27f3-40b7-b085-a8b5f170210d {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 972.796364] env[61998]: DEBUG nova.compute.provider_tree [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in 
ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.800179] env[61998]: DEBUG nova.compute.manager [req-dfa16086-0fc9-4e75-b0c0-5571704b52eb req-4693c39a-f801-479f-8097-240271705b49 service nova] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Received event network-changed-e6c7c097-27f3-40b7-b085-a8b5f170210d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 972.800425] env[61998]: DEBUG nova.compute.manager [req-dfa16086-0fc9-4e75-b0c0-5571704b52eb req-4693c39a-f801-479f-8097-240271705b49 service nova] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Refreshing instance network info cache due to event network-changed-e6c7c097-27f3-40b7-b085-a8b5f170210d. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 972.800653] env[61998]: DEBUG oslo_concurrency.lockutils [req-dfa16086-0fc9-4e75-b0c0-5571704b52eb req-4693c39a-f801-479f-8097-240271705b49 service nova] Acquiring lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.800926] env[61998]: DEBUG oslo_concurrency.lockutils [req-dfa16086-0fc9-4e75-b0c0-5571704b52eb req-4693c39a-f801-479f-8097-240271705b49 service nova] Acquired lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.800985] env[61998]: DEBUG nova.network.neutron [req-dfa16086-0fc9-4e75-b0c0-5571704b52eb req-4693c39a-f801-479f-8097-240271705b49 service nova] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Refreshing network info cache for port e6c7c097-27f3-40b7-b085-a8b5f170210d {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 972.814540] env[61998]: DEBUG oslo_vmware.api [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388869, 'name': PowerOnVM_Task, 'duration_secs': 0.507926} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.814540] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 972.814872] env[61998]: INFO nova.compute.manager [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Took 8.51 seconds to spawn the instance on the hypervisor. 
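The Rename_Task and PowerOnVM_Task entries above follow oslo.vmware's standard invoke-then-poll pattern: invoke_api returns a task moref immediately, and wait_for_task polls it (the "progress is N%" lines come from _poll_task) until it reaches success. A bare sketch of the same pattern outside Nova; the host, credentials, and vm_ref are placeholders:

    from oslo_vmware import api

    # Placeholder connection details; task_poll_interval controls how often
    # the "progress is N%" poll lines are produced.
    session = api.VMwareAPISession('vcenter.example.org', 'admin', 'secret',
                                   api_retry_count=2, task_poll_interval=0.5)

    vm_ref = ...  # a VirtualMachine moref, e.g. from a SearchIndex lookup

    # Returns at once with a task moref; the power-on itself runs async.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Blocks, polling the task until 'success' (raises on task error).
    session.wait_for_task(task)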
[ 972.814936] env[61998]: DEBUG nova.compute.manager [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 972.815718] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4ba810-8ba2-4d1e-b052-74a3d011ed1a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.270801] env[61998]: DEBUG oslo_concurrency.lockutils [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Acquiring lock "4ab6f2f2-07c8-4477-a433-b6408cd919bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.271257] env[61998]: DEBUG oslo_concurrency.lockutils [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Lock "4ab6f2f2-07c8-4477-a433-b6408cd919bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.271536] env[61998]: DEBUG oslo_concurrency.lockutils [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Acquiring lock "4ab6f2f2-07c8-4477-a433-b6408cd919bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.271851] env[61998]: DEBUG oslo_concurrency.lockutils [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Lock "4ab6f2f2-07c8-4477-a433-b6408cd919bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.272154] env[61998]: DEBUG oslo_concurrency.lockutils [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Lock "4ab6f2f2-07c8-4477-a433-b6408cd919bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.275020] env[61998]: INFO nova.compute.manager [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Terminating instance [ 973.276946] env[61998]: DEBUG nova.compute.manager [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 973.277246] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.278209] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def97dd7-ea32-4add-8f2b-9d98ea7899ec {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.285387] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.287815] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.288072] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c449f952-3317-4ec7-afed-e7518a54c94c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.294030] env[61998]: DEBUG oslo_vmware.api [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Waiting for the task: (returnval){ [ 973.294030] env[61998]: value = "task-1388870" [ 973.294030] env[61998]: _type = "Task" [ 973.294030] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.298878] env[61998]: DEBUG nova.scheduler.client.report [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 973.306970] env[61998]: DEBUG oslo_vmware.api [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388870, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.334772] env[61998]: INFO nova.compute.manager [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Took 12.79 seconds to build instance. 
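The inventory dict in the scheduler report-client entry above is what placement uses to size this node. Usable capacity per resource class follows placement's standard formula, (total - reserved) * allocation_ratio; plugging in the values exactly as logged:

    # Inventory as logged for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

The max_unit fields logged alongside (16 VCPU, 65530 MB, 175 GB) additionally cap what any single allocation may request, independent of the totals above.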
[ 973.337985] env[61998]: DEBUG nova.network.neutron [req-dfa16086-0fc9-4e75-b0c0-5571704b52eb req-4693c39a-f801-479f-8097-240271705b49 service nova] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 973.435310] env[61998]: DEBUG nova.network.neutron [req-dfa16086-0fc9-4e75-b0c0-5571704b52eb req-4693c39a-f801-479f-8097-240271705b49 service nova] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.804329] env[61998]: DEBUG oslo_vmware.api [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388870, 'name': PowerOffVM_Task, 'duration_secs': 0.227991} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.804563] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.804763] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.805063] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bc00682d-376a-4ea9-9b5a-aee13e9fb4ed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.808529] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.674s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.828777] env[61998]: INFO nova.scheduler.client.report [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted allocations for instance a67aa33f-c7ba-44da-bdfa-e0a53a8538ad [ 973.836665] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c31695f0-8987-4182-a42b-91ab954a89ad tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "5789b2bc-a8c5-4986-bb53-7175cd566142" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.298s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.879442] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} 
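The teardown interleaved through the surrounding entries follows a fixed order: power the VM off, unregister it from vCenter inventory, then delete its datastore directory. Sketched below with raw oslo.vmware calls; session and vm_ref reuse the placeholders from the earlier sketch, and dc_ref and the datastore path are likewise placeholders, not Nova's implementation:

    dc_ref = ...  # Datacenter moref owning datastore1

    # Power off first: UnregisterVM raises InvalidPowerState on a running VM.
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))

    # UnregisterVM is a plain method, not a *_Task, so there is nothing to
    # poll; hence no task id appears for it in the log above.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Unregistering leaves the files behind; remove the instance directory.
    file_manager = session.vim.service_content.fileManager
    session.wait_for_task(
        session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                           file_manager,
                           name='[datastore1] 4ab6f2f2-07c8-4477-a433-b6408cd919bc',
                           datacenter=dc_ref))

This matches the sequence visible in the entries here: PowerOffVM_Task (task-1388870) completes, UnregisterVM is invoked with no task id, and DeleteDatastoreFile_Task (task-1388872) then removes the contents from datastore1.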
[ 973.880019] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.880019] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Deleting the datastore file [datastore1] 4ab6f2f2-07c8-4477-a433-b6408cd919bc {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.880371] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6cd05d1-bb4f-4832-a78c-a4f9f5af1952 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.886801] env[61998]: DEBUG oslo_vmware.api [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Waiting for the task: (returnval){ [ 973.886801] env[61998]: value = "task-1388872" [ 973.886801] env[61998]: _type = "Task" [ 973.886801] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.894952] env[61998]: DEBUG oslo_vmware.api [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388872, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.938019] env[61998]: DEBUG oslo_concurrency.lockutils [req-dfa16086-0fc9-4e75-b0c0-5571704b52eb req-4693c39a-f801-479f-8097-240271705b49 service nova] Releasing lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.938472] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.938640] env[61998]: DEBUG nova.network.neutron [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 974.337340] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4eb75364-1fa3-45e5-abdb-32bfe66b6c03 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a67aa33f-c7ba-44da-bdfa-e0a53a8538ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.918s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.397678] env[61998]: DEBUG oslo_vmware.api [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 
tempest-ServerMetadataTestJSON-816861830-project-member] Task: {'id': task-1388872, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.456255} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.397939] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.398145] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.398368] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.398543] env[61998]: INFO nova.compute.manager [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Took 1.12 seconds to destroy the instance on the hypervisor. [ 974.398793] env[61998]: DEBUG oslo.service.loopingcall [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.398995] env[61998]: DEBUG nova.compute.manager [-] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 974.399107] env[61998]: DEBUG nova.network.neutron [-] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.481353] env[61998]: DEBUG nova.network.neutron [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 974.742700] env[61998]: DEBUG nova.network.neutron [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance_info_cache with network_info: [{"id": "e6c7c097-27f3-40b7-b085-a8b5f170210d", "address": "fa:16:3e:34:5d:81", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6c7c097-27", "ovs_interfaceid": "e6c7c097-27f3-40b7-b085-a8b5f170210d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.848964] env[61998]: DEBUG nova.compute.manager [req-941ee872-0874-4827-a436-fc5a08ee531e req-17f7fd24-e44b-4bc4-a97b-fda9a68e2cd8 service nova] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Received event network-vif-deleted-74a584ad-09cd-42a1-bd2b-3262ccdfb78d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 974.849184] env[61998]: INFO nova.compute.manager [req-941ee872-0874-4827-a436-fc5a08ee531e req-17f7fd24-e44b-4bc4-a97b-fda9a68e2cd8 service nova] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Neutron deleted interface 74a584ad-09cd-42a1-bd2b-3262ccdfb78d; detaching it from the instance and deleting it from the info cache [ 974.849363] env[61998]: DEBUG nova.network.neutron [req-941ee872-0874-4827-a436-fc5a08ee531e req-17f7fd24-e44b-4bc4-a97b-fda9a68e2cd8 service nova] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.933847] env[61998]: DEBUG nova.compute.manager [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Stashing vm_state: active {{(pid=61998) _prep_resize /opt/stack/nova/nova/compute/manager.py:5923}} [ 975.245263] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.245647] env[61998]: DEBUG nova.compute.manager [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d 
tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Instance network_info: |[{"id": "e6c7c097-27f3-40b7-b085-a8b5f170210d", "address": "fa:16:3e:34:5d:81", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6c7c097-27", "ovs_interfaceid": "e6c7c097-27f3-40b7-b085-a8b5f170210d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 975.246194] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:5d:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bec903a9-d773-4d7c-a80c-c2533be346fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6c7c097-27f3-40b7-b085-a8b5f170210d', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 975.253952] env[61998]: DEBUG oslo.service.loopingcall [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 975.254226] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 975.254535] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c326e456-0fee-40a0-8ce1-1b87309b0753 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.269938] env[61998]: DEBUG nova.network.neutron [-] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.277806] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 975.277806] env[61998]: value = "task-1388873" [ 975.277806] env[61998]: _type = "Task" [ 975.277806] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.287441] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388873, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.353171] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-681b65ca-e2f7-45a9-bba1-a5150eaf2014 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.364253] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665531cd-b24a-43a0-8b8e-e74202a56c67 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.391488] env[61998]: DEBUG nova.compute.manager [req-941ee872-0874-4827-a436-fc5a08ee531e req-17f7fd24-e44b-4bc4-a97b-fda9a68e2cd8 service nova] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Detach interface failed, port_id=74a584ad-09cd-42a1-bd2b-3262ccdfb78d, reason: Instance 4ab6f2f2-07c8-4477-a433-b6408cd919bc could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 975.436545] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "9de9cc49-7a81-4975-88df-5351125b180c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.436769] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "9de9cc49-7a81-4975-88df-5351125b180c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.456350] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.456625] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.773340] env[61998]: INFO nova.compute.manager [-] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Took 1.37 seconds to deallocate network for instance. [ 975.787601] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388873, 'name': CreateVM_Task, 'duration_secs': 0.319364} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.787738] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 975.788438] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.788632] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.788982] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 975.789313] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ace87d5-fa09-4b06-8f7e-b2d609c6a9ca {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.793815] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 975.793815] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]520c1bae-e44e-adf5-631a-84546852786f" [ 975.793815] env[61998]: _type = "Task" [ 975.793815] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.801429] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]520c1bae-e44e-adf5-631a-84546852786f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.939269] env[61998]: DEBUG nova.compute.manager [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 975.961870] env[61998]: INFO nova.compute.claims [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 976.279911] env[61998]: DEBUG oslo_concurrency.lockutils [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.303885] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]520c1bae-e44e-adf5-631a-84546852786f, 'name': SearchDatastore_Task, 'duration_secs': 0.025947} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.304216] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.304448] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 976.304684] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.304832] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.305022] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 976.305285] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4086d869-e451-419b-84a0-64d65275f165 {{(pid=61998) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.319513] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 976.319677] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 976.320352] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e93afeeb-5a86-4328-ad89-ed03498289f0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.325103] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 976.325103] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]528d2bfa-fbe9-a298-77ff-86761490f921" [ 976.325103] env[61998]: _type = "Task" [ 976.325103] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.332126] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528d2bfa-fbe9-a298-77ff-86761490f921, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.457621] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.467293] env[61998]: INFO nova.compute.resource_tracker [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating resource usage from migration ab27e194-c511-4930-a465-c5fccf87b277 [ 976.550153] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0dcd2bd-fc62-4aaa-922e-38e5afeded55 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.557938] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2018b9-4584-4dd7-be34-29ca6ee2f94d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.588984] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7e88d3-408e-4181-9bb3-01e9c6122431 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.596063] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a5176b-8c9f-43e9-b551-85aaa85e40ec {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.609820] env[61998]: DEBUG nova.compute.provider_tree [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.835871] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528d2bfa-fbe9-a298-77ff-86761490f921, 'name': SearchDatastore_Task, 'duration_secs': 0.038717} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.836741] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-154a7eaf-3f1f-4e7b-9675-5172ec5d33e5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.841563] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 976.841563] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5213bbe6-725e-21f0-23c7-f941b83272d9" [ 976.841563] env[61998]: _type = "Task" [ 976.841563] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.848883] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5213bbe6-725e-21f0-23c7-f941b83272d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.113352] env[61998]: DEBUG nova.scheduler.client.report [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 977.351668] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5213bbe6-725e-21f0-23c7-f941b83272d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009613} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.351923] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.352198] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 101d9d29-24b4-4c4d-bf7a-70abfd200be9/101d9d29-24b4-4c4d-bf7a-70abfd200be9.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 977.352444] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d299507-447a-42ac-b41c-d14b6f33b6e0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.358721] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 977.358721] env[61998]: value = "task-1388874" [ 977.358721] env[61998]: _type = "Task" [ 977.358721] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.366037] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388874, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.618192] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.161s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.618584] env[61998]: INFO nova.compute.manager [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Migrating [ 977.625703] env[61998]: DEBUG oslo_concurrency.lockutils [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.346s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.626024] env[61998]: DEBUG nova.objects.instance [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Lazy-loading 'resources' on Instance uuid 4ab6f2f2-07c8-4477-a433-b6408cd919bc {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.868497] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388874, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46842} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.868747] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 101d9d29-24b4-4c4d-bf7a-70abfd200be9/101d9d29-24b4-4c4d-bf7a-70abfd200be9.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 977.868945] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 977.869215] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53cd8fd4-a8b5-48a9-823f-d0021b0da2a8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.875098] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 977.875098] env[61998]: value = "task-1388875" [ 977.875098] env[61998]: _type = "Task" [ 977.875098] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.883056] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388875, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.137656] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.137799] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.138084] env[61998]: DEBUG nova.network.neutron [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 978.226149] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7da5760-ce1e-46dc-bcc0-4244c8221c01 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.233353] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1ccfd1-d0e7-4223-b7da-3e8ae63911fe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.262974] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22420292-29cb-408a-82d4-25370ac07ce7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.271363] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30beeee-7ff4-4ae3-b5b2-01d0739ecc0e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.285898] env[61998]: DEBUG nova.compute.provider_tree [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.384115] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388875, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060865} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.384391] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 978.385143] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857b6ad5-725f-47e8-9e31-fafe0609d2d4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.406319] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 101d9d29-24b4-4c4d-bf7a-70abfd200be9/101d9d29-24b4-4c4d-bf7a-70abfd200be9.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 978.406505] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b4fcefd-1c54-4e59-9ba7-f3bb34773946 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.425116] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 978.425116] env[61998]: value = "task-1388876" [ 978.425116] env[61998]: _type = "Task" [ 978.425116] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.431940] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388876, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.789256] env[61998]: DEBUG nova.scheduler.client.report [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 978.825883] env[61998]: DEBUG nova.network.neutron [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance_info_cache with network_info: [{"id": "755f7df1-f8ab-44a0-92fd-7ffcdc053632", "address": "fa:16:3e:c8:1f:fe", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755f7df1-f8", "ovs_interfaceid": "755f7df1-f8ab-44a0-92fd-7ffcdc053632", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.934810] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388876, 'name': ReconfigVM_Task, 'duration_secs': 0.2529} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.935111] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 101d9d29-24b4-4c4d-bf7a-70abfd200be9/101d9d29-24b4-4c4d-bf7a-70abfd200be9.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 978.935812] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9d419dc-b82b-495d-8522-30007feae29c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.942422] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 978.942422] env[61998]: value = "task-1388877" [ 978.942422] env[61998]: _type = "Task" [ 978.942422] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.950078] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388877, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.295576] env[61998]: DEBUG oslo_concurrency.lockutils [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.670s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.298025] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.840s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.299760] env[61998]: INFO nova.compute.claims [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.313750] env[61998]: INFO nova.scheduler.client.report [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Deleted allocations for instance 4ab6f2f2-07c8-4477-a433-b6408cd919bc [ 979.327960] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.451752] env[61998]: DEBUG oslo_vmware.api [None 
req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388877, 'name': Rename_Task, 'duration_secs': 0.140916} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.451985] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 979.452225] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38b69c90-4e7e-4823-9e4c-dfce0301e9bd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.458628] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 979.458628] env[61998]: value = "task-1388878" [ 979.458628] env[61998]: _type = "Task" [ 979.458628] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.465751] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388878, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.586139] env[61998]: DEBUG oslo_concurrency.lockutils [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.586454] env[61998]: DEBUG oslo_concurrency.lockutils [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.820386] env[61998]: DEBUG oslo_concurrency.lockutils [None req-36fe4417-bbe3-4dbe-b1cc-3416a6c4482a tempest-ServerMetadataTestJSON-816861830 tempest-ServerMetadataTestJSON-816861830-project-member] Lock "4ab6f2f2-07c8-4477-a433-b6408cd919bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.549s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.969419] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388878, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.090012] env[61998]: INFO nova.compute.manager [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Detaching volume 834152ba-512a-44f5-b453-523da9f699e7 [ 980.122517] env[61998]: INFO nova.virt.block_device [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Attempting to driver detach volume 834152ba-512a-44f5-b453-523da9f699e7 from mountpoint /dev/sdb [ 980.122960] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Volume detach. Driver type: vmdk {{(pid=61998) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 980.122960] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294760', 'volume_id': '834152ba-512a-44f5-b453-523da9f699e7', 'name': 'volume-834152ba-512a-44f5-b453-523da9f699e7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '2d0b199f-e0f1-42e0-afb5-e08602aebf01', 'attached_at': '', 'detached_at': '', 'volume_id': '834152ba-512a-44f5-b453-523da9f699e7', 'serial': '834152ba-512a-44f5-b453-523da9f699e7'} {{(pid=61998) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 980.123889] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940e5af6-bebd-47ad-b7ad-a1dfc6dc6523 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.146702] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2258a4cc-d9d5-4a7c-afee-b9804e88f8aa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.153667] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb86e1ca-6a38-40ae-a007-1428036ef01c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.173472] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2762bf25-2a2a-4612-b139-1f75ce592b39 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.187794] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] The volume has not been displaced from its original location: [datastore2] volume-834152ba-512a-44f5-b453-523da9f699e7/volume-834152ba-512a-44f5-b453-523da9f699e7.vmdk. No consolidation needed. 
{{(pid=61998) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 980.193349] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Reconfiguring VM instance instance-00000042 to detach disk 2001 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 980.193643] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0942746-e056-4a17-9c2a-082ed8bb0d08 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.211294] env[61998]: DEBUG oslo_vmware.api [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 980.211294] env[61998]: value = "task-1388879" [ 980.211294] env[61998]: _type = "Task" [ 980.211294] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.218974] env[61998]: DEBUG oslo_vmware.api [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388879, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.404341] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f16e07e-e6f5-424a-a1f6-56c501b7fb24 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.412975] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793b8221-4b3b-4857-ba17-7dd6b997fc08 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.443737] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8ce273-cded-4479-b276-d87a5ea9254d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.450997] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b655ab1b-be3b-4e08-bdcc-ba80df3dae44 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.467438] env[61998]: DEBUG nova.compute.provider_tree [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.476490] env[61998]: DEBUG oslo_vmware.api [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388878, 'name': PowerOnVM_Task, 'duration_secs': 0.631529} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.477407] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 980.477609] env[61998]: INFO nova.compute.manager [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Took 8.78 seconds to spawn the instance on the hypervisor. [ 980.477799] env[61998]: DEBUG nova.compute.manager [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 980.478658] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104daecd-fb64-4ebc-ad28-57a65482c938 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.721988] env[61998]: DEBUG oslo_vmware.api [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388879, 'name': ReconfigVM_Task, 'duration_secs': 0.237274} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.722295] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Reconfigured VM instance instance-00000042 to detach disk 2001 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 980.726891] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71299782-a469-47d4-8c97-036906c10e0d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.741437] env[61998]: DEBUG oslo_vmware.api [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 980.741437] env[61998]: value = "task-1388880" [ 980.741437] env[61998]: _type = "Task" [ 980.741437] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.749171] env[61998]: DEBUG oslo_vmware.api [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388880, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.842731] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0f2075-041f-4b41-9128-a618d1ca5dc0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.861457] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance '5789b2bc-a8c5-4986-bb53-7175cd566142' progress to 0 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 980.973088] env[61998]: DEBUG nova.scheduler.client.report [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 980.996749] env[61998]: INFO nova.compute.manager [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Took 14.49 seconds to build instance. [ 981.251876] env[61998]: DEBUG oslo_vmware.api [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388880, 'name': ReconfigVM_Task, 'duration_secs': 0.140782} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.252193] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-294760', 'volume_id': '834152ba-512a-44f5-b453-523da9f699e7', 'name': 'volume-834152ba-512a-44f5-b453-523da9f699e7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '2d0b199f-e0f1-42e0-afb5-e08602aebf01', 'attached_at': '', 'detached_at': '', 'volume_id': '834152ba-512a-44f5-b453-523da9f699e7', 'serial': '834152ba-512a-44f5-b453-523da9f699e7'} {{(pid=61998) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 981.368542] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.369422] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75cdae0c-702f-40e6-afd8-486c397e987c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.379023] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 981.379023] env[61998]: value = "task-1388881" [ 981.379023] env[61998]: _type = "Task" [ 981.379023] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.393798] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388881, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.479082] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.181s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.479645] env[61998]: DEBUG nova.compute.manager [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}}
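Editor's note: the "Detached VMDK:" record above captures the connection_info dict that the vmwareapi volume driver operates on. As a minimal illustration (not nova's actual code; the helper name is invented), the dict literal below is copied from that record and the function simply pulls out the fields of interest:

connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-294760',  # shadow VM backing the volume
        'volume_id': '834152ba-512a-44f5-b453-523da9f699e7',
        'name': 'volume-834152ba-512a-44f5-b453-523da9f699e7',
        'access_mode': 'rw',
        'encrypted': False,
    },
    'serial': '834152ba-512a-44f5-b453-523da9f699e7',
}

def describe_vmdk_attachment(info):
    # Hypothetical helper: return (backing ref, volume id, writable?).
    data = info['data']
    return data['volume'], data['volume_id'], data['access_mode'] == 'rw'

print(describe_vmdk_attachment(connection_info))
# -> ('vm-294760', '834152ba-512a-44f5-b453-523da9f699e7', True)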
[ 981.500281] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4b735d34-bd5d-49be-b604-2bc7f828d14d tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.811301] env[61998]: DEBUG nova.objects.instance [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lazy-loading 'flavor' on Instance uuid 2d0b199f-e0f1-42e0-afb5-e08602aebf01 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.888573] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388881, 'name': PowerOffVM_Task, 'duration_secs': 0.169334} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.888874] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.889191] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance '5789b2bc-a8c5-4986-bb53-7175cd566142' progress to 17 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 981.985392] env[61998]: DEBUG nova.compute.utils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 981.986890] env[61998]: DEBUG nova.compute.manager [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Allocating IP information in the background.
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 981.987085] env[61998]: DEBUG nova.network.neutron [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 982.042991] env[61998]: DEBUG nova.policy [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '744da696f7c64f62ae04195aa737fab4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c75c9b7c8d6b441d80fe512c37c88679', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 982.336351] env[61998]: DEBUG nova.network.neutron [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Successfully created port: 50f11194-a00f-483a-9c48-b0334da2581b {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 982.396751] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 982.397012] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 982.397185] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 982.397371] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 982.397626] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 
tempest-ServerDiskConfigTestJSON-347226950-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 982.397719] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 982.397917] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 982.398115] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 982.398296] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 982.398473] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 982.398690] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 982.403685] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2d85116-a5a3-4784-bc55-a7808d11716c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.420086] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 982.420086] env[61998]: value = "task-1388882" [ 982.420086] env[61998]: _type = "Task" [ 982.420086] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.428598] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388882, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.490775] env[61998]: DEBUG nova.compute.manager [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 982.821014] env[61998]: DEBUG oslo_concurrency.lockutils [None req-82f14336-7fcb-41ca-8b52-a3c4c0097a1a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.234s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.822724] env[61998]: DEBUG nova.compute.manager [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Stashing vm_state: active {{(pid=61998) _prep_resize /opt/stack/nova/nova/compute/manager.py:5923}} [ 982.930360] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388882, 'name': ReconfigVM_Task, 'duration_secs': 0.155794} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.930749] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance '5789b2bc-a8c5-4986-bb53-7175cd566142' progress to 33 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 983.343735] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.343735] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.437292] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 983.437619] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 983.437788] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 983.437977] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 983.438144] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 983.438294] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 983.438549] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 983.438742] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 983.438913] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 983.439206] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 983.439473] env[61998]: DEBUG nova.virt.hardware [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 
tempest-ServerDiskConfigTestJSON-347226950-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 983.445782] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Reconfiguring VM instance instance-0000005c to detach disk 2000 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 983.445782] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ed68142-a264-4e4d-8de8-75973f0cdc18 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.464778] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 983.464778] env[61998]: value = "task-1388883" [ 983.464778] env[61998]: _type = "Task" [ 983.464778] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.473386] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388883, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.504405] env[61998]: DEBUG nova.compute.manager [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 983.530918] env[61998]: DEBUG nova.virt.hardware [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 983.531241] env[61998]: DEBUG nova.virt.hardware [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 983.531446] env[61998]: DEBUG nova.virt.hardware [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 983.531678] env[61998]: DEBUG nova.virt.hardware [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 983.531854] env[61998]: DEBUG nova.virt.hardware [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 983.532051] env[61998]: DEBUG nova.virt.hardware [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 983.532273] env[61998]: DEBUG nova.virt.hardware [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 983.532467] env[61998]: DEBUG nova.virt.hardware [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 983.532669] env[61998]: DEBUG nova.virt.hardware [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] 
Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 983.532884] env[61998]: DEBUG nova.virt.hardware [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 983.533116] env[61998]: DEBUG nova.virt.hardware [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 983.534371] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcdb105-5efc-47b6-bcb1-10e5987217de {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.543147] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e24b7e-e97c-4a3a-ba50-7c6e22d3a7b0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.798020] env[61998]: DEBUG nova.compute.manager [req-d90a16ef-2e62-4f5c-af21-f3b0b9d1fcca req-cba1cf93-0c50-46f5-b4eb-4cbb7795d3d7 service nova] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Received event network-vif-plugged-50f11194-a00f-483a-9c48-b0334da2581b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 983.798278] env[61998]: DEBUG oslo_concurrency.lockutils [req-d90a16ef-2e62-4f5c-af21-f3b0b9d1fcca req-cba1cf93-0c50-46f5-b4eb-4cbb7795d3d7 service nova] Acquiring lock "9de9cc49-7a81-4975-88df-5351125b180c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.798527] env[61998]: DEBUG oslo_concurrency.lockutils [req-d90a16ef-2e62-4f5c-af21-f3b0b9d1fcca req-cba1cf93-0c50-46f5-b4eb-4cbb7795d3d7 service nova] Lock "9de9cc49-7a81-4975-88df-5351125b180c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.798714] env[61998]: DEBUG oslo_concurrency.lockutils [req-d90a16ef-2e62-4f5c-af21-f3b0b9d1fcca req-cba1cf93-0c50-46f5-b4eb-4cbb7795d3d7 service nova] Lock "9de9cc49-7a81-4975-88df-5351125b180c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.798949] env[61998]: DEBUG nova.compute.manager [req-d90a16ef-2e62-4f5c-af21-f3b0b9d1fcca req-cba1cf93-0c50-46f5-b4eb-4cbb7795d3d7 service nova] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] No waiting events found dispatching network-vif-plugged-50f11194-a00f-483a-9c48-b0334da2581b {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 983.799138] env[61998]: WARNING nova.compute.manager [req-d90a16ef-2e62-4f5c-af21-f3b0b9d1fcca req-cba1cf93-0c50-46f5-b4eb-4cbb7795d3d7 service nova] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Received unexpected event 
network-vif-plugged-50f11194-a00f-483a-9c48-b0334da2581b for instance with vm_state building and task_state spawning. [ 983.851301] env[61998]: INFO nova.compute.claims [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 983.886871] env[61998]: DEBUG oslo_concurrency.lockutils [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.887143] env[61998]: DEBUG oslo_concurrency.lockutils [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.887359] env[61998]: DEBUG oslo_concurrency.lockutils [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.887578] env[61998]: DEBUG oslo_concurrency.lockutils [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.887756] env[61998]: DEBUG oslo_concurrency.lockutils [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.890889] env[61998]: INFO nova.compute.manager [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Terminating instance [ 983.893155] env[61998]: DEBUG nova.compute.manager [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 983.893567] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.894291] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed1813c-8b5c-4aea-b831-f683d3b853f5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.901919] env[61998]: DEBUG nova.network.neutron [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Successfully updated port: 50f11194-a00f-483a-9c48-b0334da2581b {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 983.906169] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.906843] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f1bd825-3459-45b5-81fe-faa7c8b1b4ee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.913033] env[61998]: DEBUG oslo_vmware.api [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 983.913033] env[61998]: value = "task-1388884" [ 983.913033] env[61998]: _type = "Task" [ 983.913033] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.922643] env[61998]: DEBUG oslo_vmware.api [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388884, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.976085] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388883, 'name': ReconfigVM_Task, 'duration_secs': 0.160534} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.976899] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Reconfigured VM instance instance-0000005c to detach disk 2000 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 983.977329] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da50b04f-1bf1-4388-b736-ab5081eec458 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.005773] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 5789b2bc-a8c5-4986-bb53-7175cd566142/5789b2bc-a8c5-4986-bb53-7175cd566142.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 984.006125] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f6d6c8b-0fda-4fc9-8110-aaf39f6773d7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.025100] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 984.025100] env[61998]: value = "task-1388885" [ 984.025100] env[61998]: _type = "Task" [ 984.025100] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.033430] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388885, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
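Editor's note: the Acquiring lock / acquired ... waited Ns / "released" ... held Ns records that recur throughout this section (for "compute_resources", instance UUIDs, and the "refresh_cache-..." names seen just below) are emitted by oslo.concurrency's lockutils. A minimal sketch of that pattern, with illustrative lock names:

from oslo_concurrency import lockutils

# Decorator form: serializes callers on an in-process lock and logs the
# acquire/wait/held timings at DEBUG, producing records like those above.
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass  # critical section

# Context-manager form, as used around cache refreshes:
with lockutils.lock('refresh_cache-example-uuid'):
    pass  # critical section

claim_resources()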
[ 984.360165] env[61998]: INFO nova.compute.resource_tracker [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating resource usage from migration 0a805f44-1861-4bad-a52c-51f18cb0e67c [ 984.403652] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "refresh_cache-9de9cc49-7a81-4975-88df-5351125b180c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.403801] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "refresh_cache-9de9cc49-7a81-4975-88df-5351125b180c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.403945] env[61998]: DEBUG nova.network.neutron [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 984.427618] env[61998]: DEBUG oslo_vmware.api [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388884, 'name': PowerOffVM_Task, 'duration_secs': 0.209329} completed successfully.
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.429945] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 984.430209] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 984.430566] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da191da8-a2f8-40fc-bd2b-008bfe8d6fbb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.461887] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bd8d42-c886-4650-92ca-59182fb8ac4a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.469121] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0da2ed-4a72-4e06-b7a8-1b65dcb0deb7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.499736] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d4d69d-3ce8-44e7-887b-b1c32e421920 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.502420] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 984.502507] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 984.502658] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleting the datastore file [datastore1] 2d0b199f-e0f1-42e0-afb5-e08602aebf01 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 984.502886] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b59aac3-2571-4440-a2a8-18a40493b6e5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.509812] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f585937-e3e4-4863-a3ce-d04fdab5e32a {{(pid=61998) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.513427] env[61998]: DEBUG oslo_vmware.api [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 984.513427] env[61998]: value = "task-1388887" [ 984.513427] env[61998]: _type = "Task" [ 984.513427] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.524227] env[61998]: DEBUG nova.compute.provider_tree [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.531502] env[61998]: DEBUG oslo_vmware.api [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388887, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.536050] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388885, 'name': ReconfigVM_Task, 'duration_secs': 0.33856} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.536915] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 5789b2bc-a8c5-4986-bb53-7175cd566142/5789b2bc-a8c5-4986-bb53-7175cd566142.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 984.537719] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance '5789b2bc-a8c5-4986-bb53-7175cd566142' progress to 50 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 984.934637] env[61998]: DEBUG nova.network.neutron [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 985.030138] env[61998]: DEBUG nova.scheduler.client.report [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 985.032475] env[61998]: DEBUG oslo_vmware.api [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388887, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135106} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.032718] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 985.032903] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 985.033097] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 985.033333] env[61998]: INFO nova.compute.manager [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Took 1.14 seconds to destroy the instance on the hypervisor. [ 985.033648] env[61998]: DEBUG oslo.service.loopingcall [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 985.033955] env[61998]: DEBUG nova.compute.manager [-] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 985.034051] env[61998]: DEBUG nova.network.neutron [-] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 985.043691] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1968ec9d-db94-42c5-a80f-395800d18f73 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.068379] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15f51a9-64d5-4fe8-a01e-93114eef6e3b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.092516] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance '5789b2bc-a8c5-4986-bb53-7175cd566142' progress to 67 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 985.186097] env[61998]: DEBUG nova.network.neutron [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Updating instance_info_cache with network_info: [{"id": "50f11194-a00f-483a-9c48-b0334da2581b", "address": "fa:16:3e:63:a0:a9", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f11194-a0", "ovs_interfaceid": "50f11194-a00f-483a-9c48-b0334da2581b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.502893] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.503141] env[61998]: DEBUG 
oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.537755] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.194s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.537978] env[61998]: INFO nova.compute.manager [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Migrating [ 985.641311] env[61998]: DEBUG nova.network.neutron [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Port 755f7df1-f8ab-44a0-92fd-7ffcdc053632 binding to destination host cpu-1 is already ACTIVE {{(pid=61998) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 985.688793] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "refresh_cache-9de9cc49-7a81-4975-88df-5351125b180c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.689183] env[61998]: DEBUG nova.compute.manager [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Instance network_info: |[{"id": "50f11194-a00f-483a-9c48-b0334da2581b", "address": "fa:16:3e:63:a0:a9", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f11194-a0", "ovs_interfaceid": "50f11194-a00f-483a-9c48-b0334da2581b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 985.689561] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c642b405-5b5a-4499-9f07-c7687be0e664 
tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:a0:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50f11194-a00f-483a-9c48-b0334da2581b', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 985.697116] env[61998]: DEBUG oslo.service.loopingcall [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 985.699475] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 985.699721] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e61deed9-d419-4367-b61f-07574f81d3ae {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.719154] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 985.719154] env[61998]: value = "task-1388888" [ 985.719154] env[61998]: _type = "Task" [ 985.719154] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.728019] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388888, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.828749] env[61998]: DEBUG nova.compute.manager [req-24bea687-0136-4d91-a84c-1eec5bfb99bf req-91423a2f-812e-4964-bfe0-428414e6dc96 service nova] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Received event network-changed-50f11194-a00f-483a-9c48-b0334da2581b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 985.828980] env[61998]: DEBUG nova.compute.manager [req-24bea687-0136-4d91-a84c-1eec5bfb99bf req-91423a2f-812e-4964-bfe0-428414e6dc96 service nova] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Refreshing instance network info cache due to event network-changed-50f11194-a00f-483a-9c48-b0334da2581b. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 985.829209] env[61998]: DEBUG oslo_concurrency.lockutils [req-24bea687-0136-4d91-a84c-1eec5bfb99bf req-91423a2f-812e-4964-bfe0-428414e6dc96 service nova] Acquiring lock "refresh_cache-9de9cc49-7a81-4975-88df-5351125b180c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.829379] env[61998]: DEBUG oslo_concurrency.lockutils [req-24bea687-0136-4d91-a84c-1eec5bfb99bf req-91423a2f-812e-4964-bfe0-428414e6dc96 service nova] Acquired lock "refresh_cache-9de9cc49-7a81-4975-88df-5351125b180c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.829526] env[61998]: DEBUG nova.network.neutron [req-24bea687-0136-4d91-a84c-1eec5bfb99bf req-91423a2f-812e-4964-bfe0-428414e6dc96 service nova] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Refreshing network info cache for port 50f11194-a00f-483a-9c48-b0334da2581b {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 986.005755] env[61998]: DEBUG nova.compute.manager [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 986.053600] env[61998]: DEBUG nova.network.neutron [-] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.054958] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.055252] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.055539] env[61998]: DEBUG nova.network.neutron [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 986.229488] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388888, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.526491] env[61998]: DEBUG nova.network.neutron [req-24bea687-0136-4d91-a84c-1eec5bfb99bf req-91423a2f-812e-4964-bfe0-428414e6dc96 service nova] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Updated VIF entry in instance network info cache for port 50f11194-a00f-483a-9c48-b0334da2581b. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 986.526930] env[61998]: DEBUG nova.network.neutron [req-24bea687-0136-4d91-a84c-1eec5bfb99bf req-91423a2f-812e-4964-bfe0-428414e6dc96 service nova] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Updating instance_info_cache with network_info: [{"id": "50f11194-a00f-483a-9c48-b0334da2581b", "address": "fa:16:3e:63:a0:a9", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f11194-a0", "ovs_interfaceid": "50f11194-a00f-483a-9c48-b0334da2581b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.528907] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.529172] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.530674] env[61998]: INFO nova.compute.claims [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 986.561653] env[61998]: INFO nova.compute.manager [-] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Took 1.53 seconds to deallocate network for instance. 
[ 986.667966] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "5789b2bc-a8c5-4986-bb53-7175cd566142-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.668210] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "5789b2bc-a8c5-4986-bb53-7175cd566142-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.668386] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "5789b2bc-a8c5-4986-bb53-7175cd566142-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.730941] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388888, 'name': CreateVM_Task, 'duration_secs': 0.611941} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.731128] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 986.731786] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.731958] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.732313] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 986.732572] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-826739a1-19c7-4d76-9271-02c013019fd8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.737692] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 986.737692] 
env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52c1ec1c-1b12-2484-7ea7-7f4a4d0b0e7e" [ 986.737692] env[61998]: _type = "Task" [ 986.737692] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.746847] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c1ec1c-1b12-2484-7ea7-7f4a4d0b0e7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.760866] env[61998]: DEBUG nova.network.neutron [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance_info_cache with network_info: [{"id": "e6c7c097-27f3-40b7-b085-a8b5f170210d", "address": "fa:16:3e:34:5d:81", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6c7c097-27", "ovs_interfaceid": "e6c7c097-27f3-40b7-b085-a8b5f170210d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.034206] env[61998]: DEBUG oslo_concurrency.lockutils [req-24bea687-0136-4d91-a84c-1eec5bfb99bf req-91423a2f-812e-4964-bfe0-428414e6dc96 service nova] Releasing lock "refresh_cache-9de9cc49-7a81-4975-88df-5351125b180c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.034483] env[61998]: DEBUG nova.compute.manager [req-24bea687-0136-4d91-a84c-1eec5bfb99bf req-91423a2f-812e-4964-bfe0-428414e6dc96 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Received event network-vif-deleted-da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 987.034666] env[61998]: INFO nova.compute.manager [req-24bea687-0136-4d91-a84c-1eec5bfb99bf req-91423a2f-812e-4964-bfe0-428414e6dc96 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Neutron deleted interface da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5; detaching it from the instance and deleting it from the info cache [ 987.034838] env[61998]: DEBUG nova.network.neutron [req-24bea687-0136-4d91-a84c-1eec5bfb99bf req-91423a2f-812e-4964-bfe0-428414e6dc96 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Updating instance_info_cache with network_info: [] 
{{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.070485] env[61998]: DEBUG oslo_concurrency.lockutils [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.247926] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c1ec1c-1b12-2484-7ea7-7f4a4d0b0e7e, 'name': SearchDatastore_Task, 'duration_secs': 0.00913} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.248263] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.248653] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.248955] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.249131] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.249318] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.249573] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf35c9aa-d193-491f-b186-28a5aff9e6de {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.257781] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.257959] env[61998]: DEBUG 
nova.virt.vmwareapi.vmops [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 987.258683] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-639bca15-180c-4d15-884b-732767145d73 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.263745] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.265098] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 987.265098] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52602475-1790-e520-91ab-bfbc1e783ada" [ 987.265098] env[61998]: _type = "Task" [ 987.265098] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.274798] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52602475-1790-e520-91ab-bfbc1e783ada, 'name': SearchDatastore_Task, 'duration_secs': 0.008796} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.275529] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-631a6302-ea4b-4ed2-8c5f-f681b12c1c2d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.281399] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 987.281399] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52fbfda1-471d-16b2-0564-57cbcc439ab3" [ 987.281399] env[61998]: _type = "Task" [ 987.281399] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.289437] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52fbfda1-471d-16b2-0564-57cbcc439ab3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.541820] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47f3fbb5-624a-4581-bcdf-1a3655f054aa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.551894] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f3448d-3984-49fa-b41b-dbe4281695ed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.579641] env[61998]: DEBUG nova.compute.manager [req-24bea687-0136-4d91-a84c-1eec5bfb99bf req-91423a2f-812e-4964-bfe0-428414e6dc96 service nova] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Detach interface failed, port_id=da2ff8f0-d73d-4e2b-bc2f-f5961e0265f5, reason: Instance 2d0b199f-e0f1-42e0-afb5-e08602aebf01 could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 987.662305] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3fc890-b09b-4776-9f3a-7048dabbb656 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.669735] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c330b5b-3ec4-4127-921c-753a3f63de37 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.704192] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.704391] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.704574] env[61998]: DEBUG nova.network.neutron [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 987.706872] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdb2096-616e-4eab-8990-57a8aec38014 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.715260] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4968ff8-138b-4970-8be8-e0b17937e3c2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.730987] env[61998]: DEBUG nova.compute.provider_tree [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Inventory has not changed in ProviderTree for provider: 
c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.791116] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52fbfda1-471d-16b2-0564-57cbcc439ab3, 'name': SearchDatastore_Task, 'duration_secs': 0.009196} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.791381] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.791637] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 9de9cc49-7a81-4975-88df-5351125b180c/9de9cc49-7a81-4975-88df-5351125b180c.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 987.792151] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5700e56a-6fcb-4502-bf70-d47de4ee4bf2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.797798] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 987.797798] env[61998]: value = "task-1388889" [ 987.797798] env[61998]: _type = "Task" [ 987.797798] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.805747] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388889, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.234255] env[61998]: DEBUG nova.scheduler.client.report [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 988.308125] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388889, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494878} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.308409] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 9de9cc49-7a81-4975-88df-5351125b180c/9de9cc49-7a81-4975-88df-5351125b180c.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 988.308632] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 988.308908] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2044770-2245-40e5-98ed-a9eeaec0d052 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.315212] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 988.315212] env[61998]: value = "task-1388890" [ 988.315212] env[61998]: _type = "Task" [ 988.315212] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.324574] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388890, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.508998] env[61998]: DEBUG nova.network.neutron [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance_info_cache with network_info: [{"id": "755f7df1-f8ab-44a0-92fd-7ffcdc053632", "address": "fa:16:3e:c8:1f:fe", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755f7df1-f8", "ovs_interfaceid": "755f7df1-f8ab-44a0-92fd-7ffcdc053632", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.599416] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dd057f0c-12c5-4b97-8229-5c2bdf8997d4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "interface-58626303-4d70-48bb-9aaf-1b54cef92a76-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.599691] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dd057f0c-12c5-4b97-8229-5c2bdf8997d4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-58626303-4d70-48bb-9aaf-1b54cef92a76-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.600023] env[61998]: DEBUG nova.objects.instance [None req-dd057f0c-12c5-4b97-8229-5c2bdf8997d4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'flavor' on Instance uuid 58626303-4d70-48bb-9aaf-1b54cef92a76 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.740848] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.212s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.741426] env[61998]: DEBUG nova.compute.manager [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 
tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 988.743914] env[61998]: DEBUG oslo_concurrency.lockutils [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.674s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.745026] env[61998]: DEBUG nova.objects.instance [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lazy-loading 'resources' on Instance uuid 2d0b199f-e0f1-42e0-afb5-e08602aebf01 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.778544] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903b88a7-b394-40dd-9918-170e33cee853 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.797217] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance '101d9d29-24b4-4c4d-bf7a-70abfd200be9' progress to 0 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 988.824130] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06226} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.824388] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 988.825200] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c7b63f-0166-46a9-93c5-9dd89d711db4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.846485] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 9de9cc49-7a81-4975-88df-5351125b180c/9de9cc49-7a81-4975-88df-5351125b180c.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 988.847370] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a8e5ec5-89a5-4cd0-a009-0fd9e7dc562e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.866862] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 988.866862] env[61998]: value = "task-1388891" [ 988.866862] env[61998]: _type = "Task" [ 988.866862] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.874658] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388891, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 989.011932] env[61998]: DEBUG oslo_concurrency.lockutils [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 989.104336] env[61998]: DEBUG nova.objects.instance [None req-dd057f0c-12c5-4b97-8229-5c2bdf8997d4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'pci_requests' on Instance uuid 58626303-4d70-48bb-9aaf-1b54cef92a76 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 989.247676] env[61998]: DEBUG nova.compute.utils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 989.252658] env[61998]: DEBUG nova.compute.manager [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 989.252658] env[61998]: DEBUG nova.network.neutron [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 989.302109] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 989.303521] env[61998]: DEBUG nova.policy [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b474e6789a884ad8bef3e98832168ff2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2b8a39c23bc46008370ed877054464e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}}
[ 989.304788] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6dcad28d-b9fc-486d-b5e0-4202353d6e3b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.314645] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){
[ 989.314645] env[61998]: value = "task-1388892"
[ 989.314645] env[61998]: _type = "Task"
[ 989.314645] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 989.322829] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388892, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 989.360522] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50cd6cc-99e5-415c-853b-b9918d70ed51 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.371619] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f782fc0f-88cb-49ab-a4ac-a95138bd7423 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.406622] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd59cff-8383-4449-85a2-4e2d7347b2f8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.409341] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388891, 'name': ReconfigVM_Task, 'duration_secs': 0.280433} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 989.409615] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 9de9cc49-7a81-4975-88df-5351125b180c/9de9cc49-7a81-4975-88df-5351125b180c.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 989.410604] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dbef8dec-e7b7-4551-8e7d-26915cfd0113 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.415262] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98151385-49e5-4924-a475-44788fc9a707 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.420297] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){
[ 989.420297] env[61998]: value = "task-1388893"
[ 989.420297] env[61998]: _type = "Task"
[ 989.420297] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 989.431014] env[61998]: DEBUG nova.compute.provider_tree [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 989.437186] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388893, 'name': Rename_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 989.538506] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08076715-943f-4c35-96e2-26ef9ddf1ab6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.558956] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f61691a-0f01-428e-bf2a-7f339d853601 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.566144] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance '5789b2bc-a8c5-4986-bb53-7175cd566142' progress to 83 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 989.606795] env[61998]: DEBUG nova.objects.base [None req-dd057f0c-12c5-4b97-8229-5c2bdf8997d4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Object Instance<58626303-4d70-48bb-9aaf-1b54cef92a76> lazy-loaded attributes: flavor,pci_requests {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}}
[ 989.607047] env[61998]: DEBUG nova.network.neutron [None req-dd057f0c-12c5-4b97-8229-5c2bdf8997d4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 989.685955] env[61998]: DEBUG nova.network.neutron [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Successfully created port: dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 989.752426] env[61998]: DEBUG nova.compute.manager [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}}
[ 989.775731] env[61998]: DEBUG oslo_concurrency.lockutils [None req-dd057f0c-12c5-4b97-8229-5c2bdf8997d4 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-58626303-4d70-48bb-9aaf-1b54cef92a76-None" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 1.176s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 989.824027] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388892, 'name': PowerOffVM_Task, 'duration_secs': 0.316313} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 989.824351] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 989.824485] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance '101d9d29-24b4-4c4d-bf7a-70abfd200be9' progress to 17 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 989.930676] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388893, 'name': Rename_Task, 'duration_secs': 0.141681} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 989.930958] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 989.931203] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2adc1ccb-2614-4b50-ae66-3a4a2022d54b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 989.933657] env[61998]: DEBUG nova.scheduler.client.report [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 989.938236] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){
[ 989.938236] env[61998]: value = "task-1388894"
[ 989.938236] env[61998]: _type = "Task"
[ 989.938236] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 989.945965] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388894, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 990.072783] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 990.073175] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd78a486-84e5-4aa2-b758-81d77e7e0121 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 990.080603] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){
[ 990.080603] env[61998]: value = "task-1388895"
[ 990.080603] env[61998]: _type = "Task"
[ 990.080603] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 990.088730] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388895, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 990.330569] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 990.331067] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 990.331067] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 990.331288] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 990.331445] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 990.331601] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 990.331851] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 990.332033] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 990.332274] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 990.332468] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 990.332648] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 990.338507] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ed73a8e-3335-4f86-b713-7bf7d3405971 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 990.354833] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){
[ 990.354833] env[61998]: value = "task-1388896"
[ 990.354833] env[61998]: _type = "Task"
[ 990.354833] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 990.363973] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388896, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 990.438835] env[61998]: DEBUG oslo_concurrency.lockutils [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.695s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 990.450576] env[61998]: DEBUG oslo_vmware.api [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388894, 'name': PowerOnVM_Task, 'duration_secs': 0.432406} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 990.450886] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 990.451118] env[61998]: INFO nova.compute.manager [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Took 6.95 seconds to spawn the instance on the hypervisor.
[ 990.451304] env[61998]: DEBUG nova.compute.manager [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}}
[ 990.452143] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71485dba-ece1-48e1-96c9-a0c852c83d4a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 990.462988] env[61998]: INFO nova.scheduler.client.report [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleted allocations for instance 2d0b199f-e0f1-42e0-afb5-e08602aebf01
[ 990.591480] env[61998]: DEBUG oslo_vmware.api [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388895, 'name': PowerOnVM_Task, 'duration_secs': 0.414768} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 990.591739] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 990.591927] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-48f0866e-65cc-428f-b51f-a8cf674b4feb tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance '5789b2bc-a8c5-4986-bb53-7175cd566142' progress to 100 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 990.762237] env[61998]: DEBUG nova.compute.manager [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}}
[ 990.792314] env[61998]: DEBUG nova.virt.hardware [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 990.792682] env[61998]: DEBUG nova.virt.hardware [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 990.792926] env[61998]: DEBUG nova.virt.hardware [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 990.793221] env[61998]: DEBUG nova.virt.hardware [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 990.793452] env[61998]: DEBUG nova.virt.hardware [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 990.793687] env[61998]: DEBUG nova.virt.hardware [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 990.794008] env[61998]: DEBUG nova.virt.hardware [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 990.794260] env[61998]: DEBUG nova.virt.hardware [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 990.794515] env[61998]: DEBUG nova.virt.hardware [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 990.794766] env[61998]: DEBUG nova.virt.hardware [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 990.795042] env[61998]: DEBUG nova.virt.hardware [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 990.796222] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57127487-5cab-4ccf-b346-e04f3a5cfaa7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 990.806416] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90938e76-2424-4e57-99ac-2acaf242f66c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 990.864497] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388896, 'name': ReconfigVM_Task, 'duration_secs': 0.18949} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 990.864859] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance '101d9d29-24b4-4c4d-bf7a-70abfd200be9' progress to 33 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 990.971605] env[61998]: DEBUG oslo_concurrency.lockutils [None req-45a67266-d53a-4c09-916f-9e39c8bea6ce tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "2d0b199f-e0f1-42e0-afb5-e08602aebf01" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.084s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 990.973163] env[61998]: INFO nova.compute.manager [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Took 14.53 seconds to build instance.
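[editor's note] The block above repeats the vSphere task lifecycle that dominates this log: a SOAP call is issued ("Invoking VirtualMachine.PowerOffVM_Task ..."), the returned task object is awaited ("Waiting for the task: ... task-1388892"), and the poller logs "progress is N%" until "completed successfully". As a hedged illustration only, here is a minimal sketch of that pattern against the public oslo.vmware API; the vCenter endpoint, credentials, and managed-object ID below are placeholders, not values from this log:

    # Sketch, not the Nova code that produced these records: drives one
    # PowerOffVM_Task through the same invoke/poll cycle logged above.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Hypothetical endpoint and credentials.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical moref; in Nova this comes from property-collector lookups
    # (the PropertyCollector.RetrievePropertiesEx calls seen throughout).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Issues the SOAP request ("Invoking VirtualMachine.PowerOffVM_Task with
    # opID=...") and returns a task object like task-1388892.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # Polls the task, logging "progress is N%" until "completed successfully".
    session.wait_for_task(task)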
[ 991.130320] env[61998]: DEBUG nova.compute.manager [req-856521cf-d268-4735-94ae-2046d1a53d92 req-bfa413be-6537-41d2-8894-972bb12da799 service nova] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Received event network-vif-plugged-dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 991.130549] env[61998]: DEBUG oslo_concurrency.lockutils [req-856521cf-d268-4735-94ae-2046d1a53d92 req-bfa413be-6537-41d2-8894-972bb12da799 service nova] Acquiring lock "f87f913f-9e6e-4d64-9fe1-0a1fc8564b46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 991.130762] env[61998]: DEBUG oslo_concurrency.lockutils [req-856521cf-d268-4735-94ae-2046d1a53d92 req-bfa413be-6537-41d2-8894-972bb12da799 service nova] Lock "f87f913f-9e6e-4d64-9fe1-0a1fc8564b46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 991.130936] env[61998]: DEBUG oslo_concurrency.lockutils [req-856521cf-d268-4735-94ae-2046d1a53d92 req-bfa413be-6537-41d2-8894-972bb12da799 service nova] Lock "f87f913f-9e6e-4d64-9fe1-0a1fc8564b46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 991.131142] env[61998]: DEBUG nova.compute.manager [req-856521cf-d268-4735-94ae-2046d1a53d92 req-bfa413be-6537-41d2-8894-972bb12da799 service nova] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] No waiting events found dispatching network-vif-plugged-dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}}
[ 991.131286] env[61998]: WARNING nova.compute.manager [req-856521cf-d268-4735-94ae-2046d1a53d92 req-bfa413be-6537-41d2-8894-972bb12da799 service nova] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Received unexpected event network-vif-plugged-dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba for instance with vm_state building and task_state spawning.
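[editor's note] The "Acquiring lock ... / acquired ... waited 0.000s / "released" ... held 0.000s" trio above is the standard trace emitted by oslo.concurrency's synchronized decorator (the "inner" wrapper at lockutils.py:402/407/421). A minimal sketch of that mechanism, with a made-up lock name rather than one taken from this log:

    # Sketch of the oslo.concurrency pattern behind the acquire/release trio.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('example-instance-uuid-events')  # hypothetical name
    def _pop_event():
        # Body runs with the named in-process lock held; lockutils logs the
        # acquire (with wait time) and the release (with hold time) at DEBUG.
        pass

    _pop_event()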
[ 991.207501] env[61998]: DEBUG nova.network.neutron [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Successfully updated port: dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 991.371899] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 991.372266] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 991.372315] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.372491] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 991.372643] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.372804] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 991.373056] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 991.373229] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 991.373401] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 991.373568] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 991.373744] env[61998]: DEBUG nova.virt.hardware [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 991.379364] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Reconfiguring VM instance instance-0000005d to detach disk 2000 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 991.379664] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41ad8ca7-8525-44ac-bd14-5fc52c89b827 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.398187] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 991.398187] env[61998]: value = "task-1388897" [ 991.398187] env[61998]: _type = "Task" [ 991.398187] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.406793] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388897, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.474841] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c642b405-5b5a-4499-9f07-c7687be0e664 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "9de9cc49-7a81-4975-88df-5351125b180c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.038s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.647182] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "interface-58626303-4d70-48bb-9aaf-1b54cef92a76-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.647182] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-58626303-4d70-48bb-9aaf-1b54cef92a76-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.647182] env[61998]: DEBUG nova.objects.instance [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'flavor' on Instance uuid 58626303-4d70-48bb-9aaf-1b54cef92a76 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 991.709527] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "refresh_cache-f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.709690] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired lock "refresh_cache-f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.709856] env[61998]: DEBUG nova.network.neutron [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 991.908615] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388897, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.271317] env[61998]: DEBUG nova.network.neutron [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 992.293643] env[61998]: DEBUG nova.objects.instance [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'pci_requests' on Instance uuid 58626303-4d70-48bb-9aaf-1b54cef92a76 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.334017] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "9de9cc49-7a81-4975-88df-5351125b180c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.334157] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "9de9cc49-7a81-4975-88df-5351125b180c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.334306] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "9de9cc49-7a81-4975-88df-5351125b180c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.335022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "9de9cc49-7a81-4975-88df-5351125b180c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.335022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "9de9cc49-7a81-4975-88df-5351125b180c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.336861] env[61998]: INFO nova.compute.manager [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Terminating instance [ 992.338657] env[61998]: DEBUG nova.compute.manager [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Start 
destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 992.338917] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.339969] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3142b0bf-fa7d-4abe-bac8-734fe1206ab6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.347502] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 992.347754] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c9dc651-30f8-45f7-b7ed-292a40e9f0b2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.353103] env[61998]: DEBUG oslo_vmware.api [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 992.353103] env[61998]: value = "task-1388898" [ 992.353103] env[61998]: _type = "Task" [ 992.353103] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.362719] env[61998]: DEBUG oslo_vmware.api [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388898, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.397243] env[61998]: DEBUG nova.network.neutron [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Updating instance_info_cache with network_info: [{"id": "dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba", "address": "fa:16:3e:39:10:78", "network": {"id": "eb61f6e6-e4f1-4abc-9153-1b2f61641b6e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1941982145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f2b8a39c23bc46008370ed877054464e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcbfafa6-90", "ovs_interfaceid": "dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.410737] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388897, 'name': ReconfigVM_Task, 'duration_secs': 0.527911} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.411089] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Reconfigured VM instance instance-0000005d to detach disk 2000 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 992.411974] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b7b753-6168-487c-9736-e01326f63407 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.415813] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "5789b2bc-a8c5-4986-bb53-7175cd566142" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.415813] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "5789b2bc-a8c5-4986-bb53-7175cd566142" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.416029] env[61998]: DEBUG nova.compute.manager [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Going to confirm migration 2 {{(pid=61998) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5082}} [ 992.442998] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 101d9d29-24b4-4c4d-bf7a-70abfd200be9/101d9d29-24b4-4c4d-bf7a-70abfd200be9.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 992.444406] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-115bdd21-4385-40a0-b54a-0f904ea7c563 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.467729] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 992.467729] env[61998]: value = "task-1388899" [ 992.467729] env[61998]: _type = "Task" [ 992.467729] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.478178] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388899, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.796277] env[61998]: DEBUG nova.objects.base [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Object Instance<58626303-4d70-48bb-9aaf-1b54cef92a76> lazy-loaded attributes: flavor,pci_requests {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 992.796598] env[61998]: DEBUG nova.network.neutron [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 992.858818] env[61998]: DEBUG nova.policy [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3066202e35b643d1b6d3f2d8b4d724ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e479b6ac56f464fbc86574f776cd96c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 992.865913] env[61998]: DEBUG oslo_vmware.api [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388898, 'name': PowerOffVM_Task, 'duration_secs': 0.192039} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.866192] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 992.866366] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 992.866648] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36059eaa-bcb9-4fe6-9abc-d740135fd3a9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.900171] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Releasing lock "refresh_cache-f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.900593] env[61998]: DEBUG nova.compute.manager [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Instance network_info: |[{"id": "dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba", "address": "fa:16:3e:39:10:78", "network": {"id": "eb61f6e6-e4f1-4abc-9153-1b2f61641b6e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1941982145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f2b8a39c23bc46008370ed877054464e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcbfafa6-90", "ovs_interfaceid": "dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 992.901114] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:10:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '605f83bd-808c-4b54-922e-54b14690987a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 992.908702] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Creating folder: Project (f2b8a39c23bc46008370ed877054464e). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 992.909012] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66454770-e4ae-4d17-b1ef-a9297924e48e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.922312] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Created folder: Project (f2b8a39c23bc46008370ed877054464e) in parent group-v294665. [ 992.922662] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Creating folder: Instances. Parent ref: group-v294781. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 992.925103] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3b8cfc3-30cb-4055-9ecc-c03de6edabe5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.928240] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 992.928444] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 992.928628] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleting the datastore file [datastore1] 9de9cc49-7a81-4975-88df-5351125b180c {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.929224] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efa8e3bf-5efa-462e-94c1-a76eda9a44e5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.934879] env[61998]: DEBUG oslo_vmware.api [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 992.934879] env[61998]: value = "task-1388903" [ 992.934879] env[61998]: _type = "Task" [ 992.934879] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.938783] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Created folder: Instances in parent group-v294781. [ 992.939038] env[61998]: DEBUG oslo.service.loopingcall [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.939520] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 992.939715] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7651295f-4821-416e-9b50-6df2faba5391 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.957840] env[61998]: DEBUG oslo_vmware.api [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388903, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.962978] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 992.962978] env[61998]: value = "task-1388904" [ 992.962978] env[61998]: _type = "Task" [ 992.962978] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.972230] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388904, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.979944] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388899, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.215841] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.216109] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.216320] env[61998]: DEBUG nova.network.neutron [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.216522] env[61998]: DEBUG nova.objects.instance [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lazy-loading 'info_cache' on Instance uuid 5789b2bc-a8c5-4986-bb53-7175cd566142 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.447500] env[61998]: DEBUG oslo_vmware.api [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388903, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.339496} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.447804] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 993.448017] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 993.448215] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 993.448410] env[61998]: INFO nova.compute.manager [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Took 1.11 seconds to destroy the instance on the hypervisor. 
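The destroy sequence recorded above (power off, UnregisterVM, DeleteDatastoreFile_Task, then polling task-1388903 until it reports "completed successfully") all funnels through oslo.vmware's wait_for_task/_poll_task loop (api.py:397/434/444). Below is a minimal sketch of that polling pattern only; `get_task_info` and `TaskFailed` are illustrative stand-ins, not the library's actual internals, which do the equivalent lookup through a vCenter PropertyCollector round trip.

```python
import time


class TaskFailed(RuntimeError):
    """Illustrative error type for a task that ends in the 'error' state."""


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter task reference until it reaches a terminal state.

    `get_task_info` is a hypothetical callable standing in for the
    PropertyCollector round trip; it must return an object with `state`
    ('queued' | 'running' | 'success' | 'error'), `progress` (int or None)
    and `error` (message or None) attributes.
    """
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)
        if info.state in ('queued', 'running'):
            # Corresponds to the "... progress is N%." DEBUG lines above.
            print("Task: %s progress is %d%%." % (task_ref, info.progress or 0))
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - start
        if info.state == 'success':
            # Corresponds to "'duration_secs': ... completed successfully."
            print("Task: %s, 'duration_secs': %.6f completed successfully."
                  % (task_ref, duration))
            return info
        raise TaskFailed("Task %s failed after %.2fs: %s"
                         % (task_ref, duration, info.error))
```

The fixed-interval sleep is why progress can sit at 0% or 99% across several consecutive poll lines in the log: the task state only changes server-side, and the client merely samples it.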
[ 993.448920] env[61998]: DEBUG oslo.service.loopingcall [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.449179] env[61998]: DEBUG nova.compute.manager [-] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 993.449273] env[61998]: DEBUG nova.network.neutron [-] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 993.472531] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388904, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.473669] env[61998]: DEBUG nova.network.neutron [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Successfully created port: 80f81837-014a-44f6-a2eb-6c05b1d74801 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 993.484634] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388899, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.572513] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "4c41a59a-59d4-4abd-b173-118e759fc19c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.572750] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.680828] env[61998]: DEBUG nova.compute.manager [req-4b5fec41-d612-46e6-b456-6597b2da9b31 req-0204351e-a563-4d89-88ab-5262ba8dd3fb service nova] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Received event network-changed-dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 993.681048] env[61998]: DEBUG nova.compute.manager [req-4b5fec41-d612-46e6-b456-6597b2da9b31 req-0204351e-a563-4d89-88ab-5262ba8dd3fb service nova] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Refreshing instance network info cache due to event network-changed-dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 993.681267] env[61998]: DEBUG oslo_concurrency.lockutils [req-4b5fec41-d612-46e6-b456-6597b2da9b31 req-0204351e-a563-4d89-88ab-5262ba8dd3fb service nova] Acquiring lock "refresh_cache-f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.681413] env[61998]: DEBUG oslo_concurrency.lockutils [req-4b5fec41-d612-46e6-b456-6597b2da9b31 req-0204351e-a563-4d89-88ab-5262ba8dd3fb service nova] Acquired lock "refresh_cache-f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.681571] env[61998]: DEBUG nova.network.neutron [req-4b5fec41-d612-46e6-b456-6597b2da9b31 req-0204351e-a563-4d89-88ab-5262ba8dd3fb service nova] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Refreshing network info cache for port dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 993.973541] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388904, 'name': CreateVM_Task, 'duration_secs': 0.552129} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.976634] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 993.977549] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.977549] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.977857] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 993.978419] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c733b29c-bbc1-4e14-9020-65f48967c45d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.982811] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388899, 'name': ReconfigVM_Task, 'duration_secs': 1.252953} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.983415] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 101d9d29-24b4-4c4d-bf7a-70abfd200be9/101d9d29-24b4-4c4d-bf7a-70abfd200be9.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 993.983781] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance '101d9d29-24b4-4c4d-bf7a-70abfd200be9' progress to 50 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 993.988477] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 993.988477] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52cebe89-2d35-f74b-7d3e-a972f60b3e1d" [ 993.988477] env[61998]: _type = "Task" [ 993.988477] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.996050] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52cebe89-2d35-f74b-7d3e-a972f60b3e1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.074993] env[61998]: DEBUG nova.compute.manager [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 994.396099] env[61998]: DEBUG nova.network.neutron [-] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.398124] env[61998]: DEBUG nova.network.neutron [req-4b5fec41-d612-46e6-b456-6597b2da9b31 req-0204351e-a563-4d89-88ab-5262ba8dd3fb service nova] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Updated VIF entry in instance network info cache for port dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 994.398444] env[61998]: DEBUG nova.network.neutron [req-4b5fec41-d612-46e6-b456-6597b2da9b31 req-0204351e-a563-4d89-88ab-5262ba8dd3fb service nova] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Updating instance_info_cache with network_info: [{"id": "dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba", "address": "fa:16:3e:39:10:78", "network": {"id": "eb61f6e6-e4f1-4abc-9153-1b2f61641b6e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1941982145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f2b8a39c23bc46008370ed877054464e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcbfafa6-90", "ovs_interfaceid": "dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.412643] env[61998]: DEBUG nova.network.neutron [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance_info_cache with network_info: [{"id": "755f7df1-f8ab-44a0-92fd-7ffcdc053632", "address": "fa:16:3e:c8:1f:fe", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755f7df1-f8", "ovs_interfaceid": "755f7df1-f8ab-44a0-92fd-7ffcdc053632", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.492069] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14018fb5-8923-48ec-a9e3-a804a67d0086 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.504077] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 
tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52cebe89-2d35-f74b-7d3e-a972f60b3e1d, 'name': SearchDatastore_Task, 'duration_secs': 0.012463} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.517306] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.517560] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 994.517849] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.518019] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.518218] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 994.518578] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d35e309-4210-4647-8e5b-e81258ed4978 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.520827] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c56c53-e43c-4d2e-bf6d-43ec08d2ac18 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.539066] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance '101d9d29-24b4-4c4d-bf7a-70abfd200be9' progress to 67 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 994.543201] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 994.543393] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 994.544300] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09d1035f-a7fd-4c02-9df3-5d5f3b5f4431 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.549127] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 994.549127] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5281fce0-7630-dc32-8f3f-d396a9a1f3f8" [ 994.549127] env[61998]: _type = "Task" [ 994.549127] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.556594] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5281fce0-7630-dc32-8f3f-d396a9a1f3f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.595964] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.596251] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.597780] env[61998]: INFO nova.compute.claims [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 994.905851] env[61998]: INFO nova.compute.manager [-] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Took 1.46 seconds to deallocate network for instance. 
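The Acquiring/acquired/released triplets threaded through these records come from oslo.concurrency's lockutils: the decorator-driven messages cite `inner` at lockutils.py:402/407/421, while the bare "Acquiring lock"/"Acquired lock"/"Releasing lock" pairs around the refresh_cache locks cite lockutils.py:310/313/331. A minimal sketch of the two usage patterns, assuming only oslo.concurrency is installed; the function bodies and names here are illustrative, not Nova's:

```python
from oslo_concurrency import lockutils

# Decorator form: all calls sharing the lock name are serialized, producing
# the 'Lock "compute_resources" acquired/released' pairs with the
# waited/held timings seen above.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # resource-tracker bookkeeping would run under the lock

# Context-manager form: the per-instance "refresh_cache-<uuid>" lines come
# from lockutils.lock() used this way around network-info cache rebuilds.
def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache under the lock

if __name__ == '__main__':
    update_usage()
    refresh_network_cache('f87f913f-9e6e-4d64-9fe1-0a1fc8564b46')
```

Because the lock name is just a string, unrelated code paths (instance claims, usage updates, move claims) contend on the same "compute_resources" lock, which is why the log shows one request waiting 1.386s and another holding it for 2.201s.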
[ 994.906305] env[61998]: DEBUG oslo_concurrency.lockutils [req-4b5fec41-d612-46e6-b456-6597b2da9b31 req-0204351e-a563-4d89-88ab-5262ba8dd3fb service nova] Releasing lock "refresh_cache-f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.916211] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "refresh_cache-5789b2bc-a8c5-4986-bb53-7175cd566142" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.916211] env[61998]: DEBUG nova.objects.instance [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lazy-loading 'migration_context' on Instance uuid 5789b2bc-a8c5-4986-bb53-7175cd566142 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.059562] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5281fce0-7630-dc32-8f3f-d396a9a1f3f8, 'name': SearchDatastore_Task, 'duration_secs': 0.034472} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.060191] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e212d22-6bba-47f6-8b91-9b204dc0beaf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.065464] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 995.065464] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52948dd7-5303-57a4-d9b6-da6e16bf89d6" [ 995.065464] env[61998]: _type = "Task" [ 995.065464] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.073019] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52948dd7-5303-57a4-d9b6-da6e16bf89d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.078892] env[61998]: DEBUG nova.network.neutron [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Port e6c7c097-27f3-40b7-b085-a8b5f170210d binding to destination host cpu-1 is already ACTIVE {{(pid=61998) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 995.184891] env[61998]: DEBUG nova.network.neutron [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Successfully updated port: 80f81837-014a-44f6-a2eb-6c05b1d74801 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 995.415337] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.418241] env[61998]: DEBUG nova.objects.base [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Object Instance<5789b2bc-a8c5-4986-bb53-7175cd566142> lazy-loaded attributes: info_cache,migration_context {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 995.419206] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb202e2-1d6b-40de-8be3-dc822b3507e8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.438295] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f19b2ac-55eb-4f92-995f-4aa2c4a252b5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.444117] env[61998]: DEBUG oslo_vmware.api [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 995.444117] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52dad23a-3075-ac5e-1423-0e94f79ab516" [ 995.444117] env[61998]: _type = "Task" [ 995.444117] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.450644] env[61998]: DEBUG oslo_vmware.api [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52dad23a-3075-ac5e-1423-0e94f79ab516, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.575827] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52948dd7-5303-57a4-d9b6-da6e16bf89d6, 'name': SearchDatastore_Task, 'duration_secs': 0.008747} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.576235] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.576394] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] f87f913f-9e6e-4d64-9fe1-0a1fc8564b46/f87f913f-9e6e-4d64-9fe1-0a1fc8564b46.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 995.576653] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd7e1714-eda7-4e12-ba4b-240188c42885 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.585448] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 995.585448] env[61998]: value = "task-1388905" [ 995.585448] env[61998]: _type = "Task" [ 995.585448] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.593319] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388905, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.687651] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.687895] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.688140] env[61998]: DEBUG nova.network.neutron [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 995.715027] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-199a6a23-8b49-4c85-bb34-0f04280ce4f7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.723302] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d71215-4ff4-4356-aa26-393ce73783a2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.762350] env[61998]: DEBUG nova.compute.manager [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Received event network-vif-deleted-50f11194-a00f-483a-9c48-b0334da2581b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 995.762574] env[61998]: DEBUG nova.compute.manager [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received event network-vif-plugged-80f81837-014a-44f6-a2eb-6c05b1d74801 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 995.762775] env[61998]: DEBUG oslo_concurrency.lockutils [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] Acquiring lock "58626303-4d70-48bb-9aaf-1b54cef92a76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.763044] env[61998]: DEBUG oslo_concurrency.lockutils [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] Lock "58626303-4d70-48bb-9aaf-1b54cef92a76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.763255] env[61998]: DEBUG oslo_concurrency.lockutils [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] Lock "58626303-4d70-48bb-9aaf-1b54cef92a76-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.763435] env[61998]: DEBUG nova.compute.manager [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] No waiting events found dispatching network-vif-plugged-80f81837-014a-44f6-a2eb-6c05b1d74801 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 995.763606] env[61998]: WARNING nova.compute.manager [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received unexpected event network-vif-plugged-80f81837-014a-44f6-a2eb-6c05b1d74801 for instance with vm_state active and task_state None. [ 995.763770] env[61998]: DEBUG nova.compute.manager [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received event network-changed-80f81837-014a-44f6-a2eb-6c05b1d74801 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 995.763959] env[61998]: DEBUG nova.compute.manager [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Refreshing instance network info cache due to event network-changed-80f81837-014a-44f6-a2eb-6c05b1d74801. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 995.764148] env[61998]: DEBUG oslo_concurrency.lockutils [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] Acquiring lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.764955] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8618a8-9107-448c-b7b3-887d51a29551 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.774031] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ee741c-22b3-4fb4-ab9a-4d989933c6bd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.789596] env[61998]: DEBUG nova.compute.provider_tree [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.954221] env[61998]: DEBUG oslo_vmware.api [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52dad23a-3075-ac5e-1423-0e94f79ab516, 'name': SearchDatastore_Task, 'duration_secs': 0.006545} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.954649] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.103553] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.103778] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.103957] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.110743] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388905, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449358} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.110994] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] f87f913f-9e6e-4d64-9fe1-0a1fc8564b46/f87f913f-9e6e-4d64-9fe1-0a1fc8564b46.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 996.111211] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 996.111457] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0bca3ffd-d1ca-4519-8d70-5b4d2d962dba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.119370] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 996.119370] env[61998]: value = "task-1388906" [ 996.119370] env[61998]: _type = "Task" [ 996.119370] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.126851] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388906, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.226182] env[61998]: WARNING nova.network.neutron [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] 9b8c99a8-8481-43b6-bb09-1739b4f749c3 already exists in list: networks containing: ['9b8c99a8-8481-43b6-bb09-1739b4f749c3']. 
ignoring it [ 996.292914] env[61998]: DEBUG nova.scheduler.client.report [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 996.490485] env[61998]: DEBUG nova.network.neutron [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updating instance_info_cache with network_info: [{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "address": "fa:16:3e:10:ca:da", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4cf5059-51", "ovs_interfaceid": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "80f81837-014a-44f6-a2eb-6c05b1d74801", "address": "fa:16:3e:86:0a:3b", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80f81837-01", "ovs_interfaceid": "80f81837-014a-44f6-a2eb-6c05b1d74801", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.629105] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388906, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062301} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.629441] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 996.630188] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59291987-9532-4648-81d6-f4027d513f47 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.653196] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] f87f913f-9e6e-4d64-9fe1-0a1fc8564b46/f87f913f-9e6e-4d64-9fe1-0a1fc8564b46.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.653784] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16781858-fc39-4e1d-a178-efab3f38ac33 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.672861] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 996.672861] env[61998]: value = "task-1388907" [ 996.672861] env[61998]: _type = "Task" [ 996.672861] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.680649] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388907, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.797777] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.798436] env[61998]: DEBUG nova.compute.manager [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 996.801344] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.386s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.801598] env[61998]: DEBUG nova.objects.instance [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lazy-loading 'resources' on Instance uuid 9de9cc49-7a81-4975-88df-5351125b180c {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.993910] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.994649] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.994853] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.995249] env[61998]: DEBUG oslo_concurrency.lockutils [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] Acquired lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.995496] env[61998]: DEBUG nova.network.neutron [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Refreshing network info cache for port 80f81837-014a-44f6-a2eb-6c05b1d74801 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 996.997471] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8621706-4c66-4fb8-9eea-52c4a171ff71 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.016314] env[61998]: DEBUG nova.virt.hardware [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 997.016764] env[61998]: DEBUG nova.virt.hardware [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 997.017055] env[61998]: DEBUG nova.virt.hardware [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.017357] env[61998]: DEBUG nova.virt.hardware [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 997.018036] env[61998]: DEBUG nova.virt.hardware [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.018036] env[61998]: DEBUG nova.virt.hardware [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 997.018036] env[61998]: DEBUG nova.virt.hardware [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 997.018264] env[61998]: DEBUG nova.virt.hardware [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 997.018353] env[61998]: DEBUG nova.virt.hardware [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 997.018523] env[61998]: DEBUG nova.virt.hardware [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 997.018696] env[61998]: DEBUG nova.virt.hardware [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Sorted 
desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 997.024893] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Reconfiguring VM to attach interface {{(pid=61998) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 997.025798] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fc0f18a-d9a2-4f52-9fa3-17155a85946e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.042396] env[61998]: DEBUG oslo_vmware.api [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 997.042396] env[61998]: value = "task-1388908" [ 997.042396] env[61998]: _type = "Task" [ 997.042396] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.051449] env[61998]: DEBUG oslo_vmware.api [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388908, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.157055] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.157286] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.157468] env[61998]: DEBUG nova.network.neutron [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.182739] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388907, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.305123] env[61998]: DEBUG nova.compute.utils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 997.306757] env[61998]: DEBUG nova.compute.manager [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 997.306970] env[61998]: DEBUG nova.network.neutron [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 997.349809] env[61998]: DEBUG nova.policy [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7c2b0f1e1dd4b93862b0316ea6770a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5dc1064c95484fd4afd1de8243b72d55', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 997.414743] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3791647-a332-4566-9b5f-7cc03ec803a5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.422479] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a384ea40-6e17-49a8-ae9e-ed397dbe8426 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.454557] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7fc65db-b382-47b6-b421-778c49188cdc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.462552] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b7dc5c-51fb-4888-b9f5-9af73f98eeef {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.476225] env[61998]: DEBUG nova.compute.provider_tree [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.552399] env[61998]: DEBUG oslo_vmware.api [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 
tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388908, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.619515] env[61998]: DEBUG nova.network.neutron [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Successfully created port: 33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 997.685662] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388907, 'name': ReconfigVM_Task, 'duration_secs': 0.963183} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.686076] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Reconfigured VM instance instance-0000005f to attach disk [datastore1] f87f913f-9e6e-4d64-9fe1-0a1fc8564b46/f87f913f-9e6e-4d64-9fe1-0a1fc8564b46.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 997.686794] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7f2cefb-b788-4681-8f42-28d5320e8fba {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.696292] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 997.696292] env[61998]: value = "task-1388909" [ 997.696292] env[61998]: _type = "Task" [ 997.696292] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.709084] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388909, 'name': Rename_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.725017] env[61998]: DEBUG nova.network.neutron [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updated VIF entry in instance network info cache for port 80f81837-014a-44f6-a2eb-6c05b1d74801. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 997.725547] env[61998]: DEBUG nova.network.neutron [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updating instance_info_cache with network_info: [{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "address": "fa:16:3e:10:ca:da", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4cf5059-51", "ovs_interfaceid": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "80f81837-014a-44f6-a2eb-6c05b1d74801", "address": "fa:16:3e:86:0a:3b", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80f81837-01", "ovs_interfaceid": "80f81837-014a-44f6-a2eb-6c05b1d74801", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.812992] env[61998]: DEBUG nova.compute.manager [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 997.980093] env[61998]: DEBUG nova.scheduler.client.report [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 998.052133] env[61998]: DEBUG oslo_vmware.api [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388908, 'name': ReconfigVM_Task, 'duration_secs': 0.793929} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.052918] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.053147] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Reconfigured VM to attach interface {{(pid=61998) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 998.104332] env[61998]: DEBUG nova.network.neutron [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance_info_cache with network_info: [{"id": "e6c7c097-27f3-40b7-b085-a8b5f170210d", "address": "fa:16:3e:34:5d:81", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6c7c097-27", "ovs_interfaceid": "e6c7c097-27f3-40b7-b085-a8b5f170210d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.206146] 
env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388909, 'name': Rename_Task, 'duration_secs': 0.148177} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.206483] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 998.206742] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0db52808-dc0e-4b90-94de-30b0e1c213a1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.213161] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 998.213161] env[61998]: value = "task-1388910" [ 998.213161] env[61998]: _type = "Task" [ 998.213161] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.220804] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388910, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.228590] env[61998]: DEBUG oslo_concurrency.lockutils [req-fe67c6b0-776b-40e0-a64c-bf6155c3125d req-25a04fa4-f86f-4e8a-9ccc-393beb06b3ef service nova] Releasing lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.485752] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.684s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.488737] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.534s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.511686] env[61998]: INFO nova.scheduler.client.report [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted allocations for instance 9de9cc49-7a81-4975-88df-5351125b180c [ 998.557949] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f08dc19-c6d4-43cc-b42f-adf4a00dc1a2 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-58626303-4d70-48bb-9aaf-1b54cef92a76-None" "released" by 
"nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.912s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.607093] env[61998]: DEBUG oslo_concurrency.lockutils [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.722890] env[61998]: DEBUG oslo_vmware.api [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388910, 'name': PowerOnVM_Task, 'duration_secs': 0.439469} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.723326] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 998.723406] env[61998]: INFO nova.compute.manager [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Took 7.96 seconds to spawn the instance on the hypervisor. [ 998.723571] env[61998]: DEBUG nova.compute.manager [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 998.724413] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aefa94c1-98cd-413b-9d9b-dbc2dfc3961e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.824836] env[61998]: DEBUG nova.compute.manager [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 998.849568] env[61998]: DEBUG nova.virt.hardware [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=<?>,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-31T11:47:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 998.849877] env[61998]: DEBUG nova.virt.hardware [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 998.850085] env[61998]: DEBUG nova.virt.hardware [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 998.850359] env[61998]: DEBUG nova.virt.hardware [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 998.850594] env[61998]: DEBUG nova.virt.hardware [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 998.850799] env[61998]: DEBUG nova.virt.hardware [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 998.851134] env[61998]: DEBUG nova.virt.hardware [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 998.851365] env[61998]: DEBUG nova.virt.hardware [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [
998.851571] env[61998]: DEBUG nova.virt.hardware [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 998.851777] env[61998]: DEBUG nova.virt.hardware [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 998.852008] env[61998]: DEBUG nova.virt.hardware [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 998.852968] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04389b1-fd23-4cf3-bba7-b1dc9bc80e42 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.860949] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1490a3-7845-4779-8805-6825e84f4ca1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.018699] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c7161717-81ee-41dc-88c8-2e4d80834fed tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "9de9cc49-7a81-4975-88df-5351125b180c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.684s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.090700] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d99d92-eb8d-402f-a67c-58d821e9551a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.097713] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f9cc13-3ba4-4696-8fd1-92ac8ab268ce {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.131601] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd02c25-1f3a-4710-8709-b49c1abae843 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.140882] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca898b74-565e-46f0-a8b0-702eff277316 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.146092] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50548490-986e-4fd3-9ac5-cf1567d30399 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.158118] env[61998]: DEBUG nova.compute.provider_tree [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950
tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.176393] env[61998]: DEBUG nova.scheduler.client.report [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 999.183454] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3bc02a3-810b-4198-b38c-a813ef2fea79 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.192199] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance '101d9d29-24b4-4c4d-bf7a-70abfd200be9' progress to 83 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 999.241597] env[61998]: INFO nova.compute.manager [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Took 12.73 seconds to build instance. 
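The Rename_Task and PowerOnVM_Task entries above show oslo.vmware's task-polling protocol end to end: the driver invokes a VIM *_Task method, receives a task managed-object reference (e.g. task-1388910), and wait_for_task polls the task's info property, logging "progress is N%" until the task reports success ("completed successfully") or error. A minimal sketch of that loop, assuming an established oslo_vmware VMwareAPISession and a task moref; the real oslo_vmware.api.VMwareAPISession.wait_for_task drives the poll with a FixedIntervalLoopingCall rather than a bare while loop:

import time

from oslo_vmware import vim_util


def wait_for_task(session, task, interval=0.5):
    # Poll the TaskInfo object behind the "Task: {...} progress is N%"
    # DEBUG lines until the task leaves the queued/running states.
    while True:
        info = session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, task, 'info')
        if info.state in ('queued', 'running'):
            print("Task %s progress is %s%%" % (info.key, info.progress or 0))
            time.sleep(interval)
        elif info.state == 'success':
            # Corresponds to "... completed successfully." in the log above.
            return info.result
        else:
            # state == 'error'; surface the fault to the caller.
            raise RuntimeError(info.error.localizedMessage)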
[ 999.393027] env[61998]: DEBUG nova.compute.manager [req-b2cbaa61-ef41-42bb-a28a-d35755c43ed9 req-808f1075-8175-4107-b6a1-ee5a323ac235 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Received event network-vif-plugged-33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 999.393027] env[61998]: DEBUG oslo_concurrency.lockutils [req-b2cbaa61-ef41-42bb-a28a-d35755c43ed9 req-808f1075-8175-4107-b6a1-ee5a323ac235 service nova] Acquiring lock "4c41a59a-59d4-4abd-b173-118e759fc19c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.393027] env[61998]: DEBUG oslo_concurrency.lockutils [req-b2cbaa61-ef41-42bb-a28a-d35755c43ed9 req-808f1075-8175-4107-b6a1-ee5a323ac235 service nova] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.393027] env[61998]: DEBUG oslo_concurrency.lockutils [req-b2cbaa61-ef41-42bb-a28a-d35755c43ed9 req-808f1075-8175-4107-b6a1-ee5a323ac235 service nova] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.393027] env[61998]: DEBUG nova.compute.manager [req-b2cbaa61-ef41-42bb-a28a-d35755c43ed9 req-808f1075-8175-4107-b6a1-ee5a323ac235 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] No waiting events found dispatching network-vif-plugged-33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 999.393027] env[61998]: WARNING nova.compute.manager [req-b2cbaa61-ef41-42bb-a28a-d35755c43ed9 req-808f1075-8175-4107-b6a1-ee5a323ac235 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Received unexpected event network-vif-plugged-33446d72-d352-428e-8a03-c36aaa61c776 for instance with vm_state building and task_state spawning.
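The Acquiring/acquired/"released" triplet around _pop_event above is emitted by oslo.concurrency's lockutils each time the compute manager pops a pending external event under the per-instance "<uuid>-events" lock; the "No waiting events found" / "Received unexpected event" pair just means neutron's network-vif-plugged notification arrived before any waiter registered, which is harmless while the instance is still spawning. A rough sketch of the locking pattern, with a hypothetical pared-down event store (the real code is nova.compute.manager.InstanceEvents, whose nested _pop_event is the name printed in the lock messages):

from oslo_concurrency import lockutils


class InstanceEvents(object):
    def __init__(self):
        # Hypothetical store: instance uuid -> {event name: waiter}
        self._events = {}

    def pop_instance_event(self, instance, event_name):
        @lockutils.synchronized('%s-events' % instance.uuid)
        def _pop_event():
            # lockutils logs the Acquiring/acquired/released triplet
            # around this critical section, naming the inner function
            # (hence "...pop_instance_event.<locals>._pop_event").
            return self._events.get(instance.uuid, {}).pop(event_name, None)

        return _pop_event()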
[ 999.501611] env[61998]: INFO nova.compute.manager [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Rescuing [ 999.502729] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "refresh_cache-f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.502729] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired lock "refresh_cache-f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.502729] env[61998]: DEBUG nova.network.neutron [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 999.515564] env[61998]: DEBUG nova.network.neutron [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Successfully updated port: 33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 999.699187] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 999.700381] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9aadb0cb-9739-4868-9f91-8f1f2bba415a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.706683] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 999.706683] env[61998]: value = "task-1388911" [ 999.706683] env[61998]: _type = "Task" [ 999.706683] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.714405] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388911, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.746829] env[61998]: DEBUG oslo_concurrency.lockutils [None req-95adce03-bb5b-4c27-8ad0-2923175daf54 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 14.244s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.018271] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.019031] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.019249] env[61998]: DEBUG nova.network.neutron [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1000.188219] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 1.699s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.212030] env[61998]: DEBUG nova.network.neutron [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Updating instance_info_cache with network_info: [{"id": "dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba", "address": "fa:16:3e:39:10:78", "network": {"id": "eb61f6e6-e4f1-4abc-9153-1b2f61641b6e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1941982145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f2b8a39c23bc46008370ed877054464e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcbfafa6-90", "ovs_interfaceid": "dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {},
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.219172] env[61998]: DEBUG oslo_vmware.api [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388911, 'name': PowerOnVM_Task, 'duration_secs': 0.447172} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.220607] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1000.220607] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-fd182875-44da-41ea-8708-08afc37008e5 tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance '101d9d29-24b4-4c4d-bf7a-70abfd200be9' progress to 100 {{(pid=61998) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1000.272308] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "interface-58626303-4d70-48bb-9aaf-1b54cef92a76-a1c20dc3-4d7b-4812-a552-f524de427e63" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.273074] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-58626303-4d70-48bb-9aaf-1b54cef92a76-a1c20dc3-4d7b-4812-a552-f524de427e63" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.273074] env[61998]: DEBUG nova.objects.instance [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'flavor' on Instance uuid 58626303-4d70-48bb-9aaf-1b54cef92a76 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.382852] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "a909297e-ac29-4630-a54b-abd0b6f67893" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.383295] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a909297e-ac29-4630-a54b-abd0b6f67893" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.549219] env[61998]: DEBUG nova.network.neutron [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1000.672082] env[61998]: DEBUG nova.network.neutron [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updating instance_info_cache with network_info: [{"id": "33446d72-d352-428e-8a03-c36aaa61c776", "address": "fa:16:3e:fe:86:df", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33446d72-d3", "ovs_interfaceid": "33446d72-d352-428e-8a03-c36aaa61c776", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.716144] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Releasing lock "refresh_cache-f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.743719] env[61998]: INFO nova.scheduler.client.report [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted allocation for migration ab27e194-c511-4930-a465-c5fccf87b277 [ 1000.873552] env[61998]: DEBUG nova.objects.instance [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'pci_requests' on Instance uuid 58626303-4d70-48bb-9aaf-1b54cef92a76 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.885223] env[61998]: DEBUG nova.compute.manager [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1001.175145] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.175653] env[61998]: DEBUG nova.compute.manager [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Instance network_info: |[{"id": "33446d72-d352-428e-8a03-c36aaa61c776", "address": "fa:16:3e:fe:86:df", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33446d72-d3", "ovs_interfaceid": "33446d72-d352-428e-8a03-c36aaa61c776", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1001.176200] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:86:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98e21102-8954-4f6f-b1e6-5d764a53aa22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33446d72-d352-428e-8a03-c36aaa61c776', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1001.184063] env[61998]: DEBUG oslo.service.loopingcall [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1001.184755] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1001.184963] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9a13b80-08db-48d3-b421-985a13e63250 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.207076] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1001.207076] env[61998]: value = "task-1388912" [ 1001.207076] env[61998]: _type = "Task" [ 1001.207076] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.215311] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388912, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.249767] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c62bab38-4300-4676-bdb1-3cd73f784f2e tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "5789b2bc-a8c5-4986-bb53-7175cd566142" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 8.834s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.352041] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "5789b2bc-a8c5-4986-bb53-7175cd566142" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.352643] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "5789b2bc-a8c5-4986-bb53-7175cd566142" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.352938] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "5789b2bc-a8c5-4986-bb53-7175cd566142-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.353240] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "5789b2bc-a8c5-4986-bb53-7175cd566142-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.353587] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde
tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "5789b2bc-a8c5-4986-bb53-7175cd566142-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.356139] env[61998]: INFO nova.compute.manager [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Terminating instance [ 1001.358363] env[61998]: DEBUG nova.compute.manager [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 1001.358582] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1001.359512] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5eaf1e-b5c0-4c40-803d-06528841c104 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.368020] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1001.368346] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55c431b9-7165-412a-b6ba-a5189206f43a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.375071] env[61998]: DEBUG oslo_vmware.api [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 1001.375071] env[61998]: value = "task-1388913" [ 1001.375071] env[61998]: _type = "Task" [ 1001.375071] env[61998]: } to complete.
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.375597] env[61998]: DEBUG nova.objects.base [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Object Instance<58626303-4d70-48bb-9aaf-1b54cef92a76> lazy-loaded attributes: flavor,pci_requests {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1001.375806] env[61998]: DEBUG nova.network.neutron [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1001.387674] env[61998]: DEBUG oslo_vmware.api [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388913, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.405689] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.406016] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.407556] env[61998]: INFO nova.compute.claims [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1001.465258] env[61998]: DEBUG nova.policy [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3066202e35b643d1b6d3f2d8b4d724ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e479b6ac56f464fbc86574f776cd96c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 1001.491203] env[61998]: DEBUG nova.compute.manager [req-b2e2293c-5459-4cee-b150-629f72f5e75c req-a43d009f-c70d-4e4b-8a53-08ced2a82602 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Received event network-changed-33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1001.491391] env[61998]: DEBUG nova.compute.manager [req-b2e2293c-5459-4cee-b150-629f72f5e75c req-a43d009f-c70d-4e4b-8a53-08ced2a82602 
[ 1001.491660] env[61998]: DEBUG oslo_concurrency.lockutils [req-b2e2293c-5459-4cee-b150-629f72f5e75c req-a43d009f-c70d-4e4b-8a53-08ced2a82602 service nova] Acquiring lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1001.491792] env[61998]: DEBUG oslo_concurrency.lockutils [req-b2e2293c-5459-4cee-b150-629f72f5e75c req-a43d009f-c70d-4e4b-8a53-08ced2a82602 service nova] Acquired lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1001.491981] env[61998]: DEBUG nova.network.neutron [req-b2e2293c-5459-4cee-b150-629f72f5e75c req-a43d009f-c70d-4e4b-8a53-08ced2a82602 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Refreshing network info cache for port 33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1001.717804] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388912, 'name': CreateVM_Task, 'duration_secs': 0.336483} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1001.718153] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 1001.718785] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1001.718999] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1001.719355] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1001.719621] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4d9387b-0072-4f40-9b0a-d9957ab0bcbc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1001.725174] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){
[ 1001.725174] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]523057ea-f7bc-4612-8326-c9f092e8b0a8"
[ 1001.725174] env[61998]: _type = "Task"
[ 1001.725174] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1001.738439] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523057ea-f7bc-4612-8326-c9f092e8b0a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1001.748299] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1001.748580] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-231f0b91-43e0-47d7-9516-33b49c304609 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1001.754686] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){
[ 1001.754686] env[61998]: value = "task-1388914"
[ 1001.754686] env[61998]: _type = "Task"
[ 1001.754686] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1001.764067] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388914, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1001.768174] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "eb45dbc3-a972-4004-9c9a-9bd908b34723" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1001.768416] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "eb45dbc3-a972-4004-9c9a-9bd908b34723" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1001.888160] env[61998]: DEBUG oslo_vmware.api [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388913, 'name': PowerOffVM_Task, 'duration_secs': 0.172695} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1001.888559] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1001.888641] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1001.888894] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d7893b2-0499-48f8-b107-63b6d215baee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1001.961310] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1001.961544] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1001.961742] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleting the datastore file [datastore1] 5789b2bc-a8c5-4986-bb53-7175cd566142 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1001.962022] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-599c7783-96ac-4ed3-beec-97d81b849578 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1001.968445] env[61998]: DEBUG oslo_vmware.api [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){
[ 1001.968445] env[61998]: value = "task-1388916"
[ 1001.968445] env[61998]: _type = "Task"
[ 1001.968445] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1001.976206] env[61998]: DEBUG oslo_vmware.api [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388916, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1002.152896] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1002.153315] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1002.153551] env[61998]: DEBUG nova.compute.manager [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Going to confirm migration 3 {{(pid=61998) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5082}}
[ 1002.192192] env[61998]: DEBUG nova.network.neutron [req-b2e2293c-5459-4cee-b150-629f72f5e75c req-a43d009f-c70d-4e4b-8a53-08ced2a82602 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updated VIF entry in instance network info cache for port 33446d72-d352-428e-8a03-c36aaa61c776. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 1002.192569] env[61998]: DEBUG nova.network.neutron [req-b2e2293c-5459-4cee-b150-629f72f5e75c req-a43d009f-c70d-4e4b-8a53-08ced2a82602 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updating instance_info_cache with network_info: [{"id": "33446d72-d352-428e-8a03-c36aaa61c776", "address": "fa:16:3e:fe:86:df", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33446d72-d3", "ovs_interfaceid": "33446d72-d352-428e-8a03-c36aaa61c776", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1002.236668] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523057ea-f7bc-4612-8326-c9f092e8b0a8, 'name': SearchDatastore_Task, 'duration_secs': 0.020365} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
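Editor's note: the "Updating instance_info_cache with network_info: [...]" record above embeds Nova's full network_info model as JSON-like data, one dict per VIF. A small illustrative helper (not Nova code) that reduces such an entry to port/MAC/IP tuples:

def summarize_network_info(network_info):
    """Yield (port_id, mac, fixed_ips) for each VIF in a network_info list."""
    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        yield vif["id"], vif["address"], fixed_ips

Applied to the cache entry above, this would yield port 33446d72-d352-428e-8a03-c36aaa61c776 with MAC fa:16:3e:fe:86:df and fixed IP 192.168.128.4.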
[ 1002.236953] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1002.237218] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1002.237460] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1002.237611] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1002.237836] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1002.238192] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-284ca0c6-57ec-4ba5-9094-1bec25d38858 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.246409] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1002.246575] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 1002.247335] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f71b8fb-cb52-4896-9485-68e0d7641369 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.252430] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){
[ 1002.252430] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52119c6b-4a40-06b3-ff05-e9958377559f"
[ 1002.252430] env[61998]: _type = "Task"
[ 1002.252430] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1002.266465] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52119c6b-4a40-06b3-ff05-e9958377559f, 'name': SearchDatastore_Task, 'duration_secs': 0.00916} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1002.266688] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388914, 'name': PowerOffVM_Task, 'duration_secs': 0.185774} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1002.267449] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1002.267727] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f7644c9-ee4a-40d9-bc2a-c7692a342c94 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.270241] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7671d1bd-76f8-4901-a3d1-97a28fc50151 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.274402] env[61998]: DEBUG nova.compute.manager [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}}
[ 1002.279959] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){
[ 1002.279959] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52b0136b-dc44-3b99-0b84-a2d2a4b40797"
[ 1002.279959] env[61998]: _type = "Task"
[ 1002.279959] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1002.297170] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4bad64d-f4a2-4870-800f-70878fbf87f5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.305365] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b0136b-dc44-3b99-0b84-a2d2a4b40797, 'name': SearchDatastore_Task, 'duration_secs': 0.010174} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1002.307856] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1002.308189] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 4c41a59a-59d4-4abd-b173-118e759fc19c/4c41a59a-59d4-4abd-b173-118e759fc19c.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 1002.311100] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb2364b7-f48d-4386-9928-78362cefbde7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.318364] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){
[ 1002.318364] env[61998]: value = "task-1388917"
[ 1002.318364] env[61998]: _type = "Task"
[ 1002.318364] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1002.326146] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388917, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1002.334539] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1002.334825] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcd309bf-2ff0-4bef-bb88-ff580632ab77 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.340930] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){
[ 1002.340930] env[61998]: value = "task-1388918"
[ 1002.340930] env[61998]: _type = "Task"
[ 1002.340930] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1002.349024] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388918, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1002.478301] env[61998]: DEBUG oslo_vmware.api [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388916, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14009} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1002.480742] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1002.480944] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1002.481145] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1002.481323] env[61998]: INFO nova.compute.manager [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Took 1.12 seconds to destroy the instance on the hypervisor.
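Editor's note: records [ 1001.356139] through [ 1002.481323] trace the VMware destroy path end to end: power off the VM, unregister it from vCenter inventory, then delete its datastore directory. Below is a condensed sketch of that sequence, assuming a session object with the real oslo.vmware invoke_api/wait_for_task methods; it is illustrative only, not Nova's vmops code.

def destroy_on_hypervisor(session, vm_ref, ds_path, datacenter_ref):
    # Power off and wait for the vCenter task (cf. vm_util.py:1512/1517).
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    session.wait_for_task(task)
    # Unregister removes the VM from inventory but keeps its files (vmops.py:1093).
    session.invoke_api(session.vim, "UnregisterVM", vm_ref)
    # Delete the instance directory from the datastore (ds_util.py:211/220).
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                              file_manager, name=ds_path,
                              datacenter=datacenter_ref)
    session.wait_for_task(task)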
[ 1002.481567] env[61998]: DEBUG oslo.service.loopingcall [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1002.482775] env[61998]: DEBUG nova.compute.manager [-] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 1002.482891] env[61998]: DEBUG nova.network.neutron [-] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1002.527127] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7bb8a6-23c6-4ebd-8b91-dc59f4be4e86 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.535337] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f37811a-1807-4ac5-99fa-f0528fe1044e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.566726] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fd048c-0add-472a-9fbb-15ff65d706b7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.574610] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84e520d-65d7-49fd-8ec3-97d3c61d6b51 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.588344] env[61998]: DEBUG nova.compute.provider_tree [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1002.694518] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1002.694518] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquired lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1002.694518] env[61998]: DEBUG nova.network.neutron [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1002.694518] env[61998]: DEBUG nova.objects.instance [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lazy-loading 'info_cache' on Instance uuid 101d9d29-24b4-4c4d-bf7a-70abfd200be9 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1002.696085] env[61998]: DEBUG oslo_concurrency.lockutils [req-b2e2293c-5459-4cee-b150-629f72f5e75c req-a43d009f-c70d-4e4b-8a53-08ced2a82602 service nova] Releasing lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1002.794225] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1002.829867] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388917, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1002.852443] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] VM already powered off {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}}
[ 1002.852705] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1002.852901] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1002.853067] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1002.853261] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1002.853520] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2627d62-cba7-4d2d-85b6-7ec608440ffe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.864949] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1002.865206] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 1002.865980] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6a06bb9-09de-41ac-8cb1-1c49dd7f8008 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1002.873220] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){
[ 1002.873220] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5259e556-6290-93af-87cb-1f2dc66bdf13"
[ 1002.873220] env[61998]: _type = "Task"
[ 1002.873220] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1002.881465] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5259e556-6290-93af-87cb-1f2dc66bdf13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
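Editor's note: the paired "Acquiring lock" / "Acquired lock" / "Releasing lock" records around the devstack-image-cache_base entries come from oslo.concurrency's lock() context manager (lockutils.py:310/313/331), which serializes concurrent workers probing and populating the shared image cache. A minimal sketch of the pattern using the real oslo.concurrency API; the function body is a placeholder, not Nova's cache-fill logic.

from oslo_concurrency import lockutils

IMAGE_CACHE_LOCK = "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460"

def fetch_image_if_missing():
    # Only one worker at a time may probe/populate this cache entry;
    # the acquire/release DEBUG records above are emitted by this manager.
    with lockutils.lock(IMAGE_CACHE_LOCK):
        pass  # search the datastore; copy the image in if it is absent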
[ 1002.984570] env[61998]: DEBUG nova.network.neutron [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Successfully updated port: a1c20dc3-4d7b-4812-a552-f524de427e63 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1003.092530] env[61998]: DEBUG nova.scheduler.client.report [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 1003.212345] env[61998]: DEBUG nova.network.neutron [-] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1003.329165] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388917, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696035} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1003.329445] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 4c41a59a-59d4-4abd-b173-118e759fc19c/4c41a59a-59d4-4abd-b173-118e759fc19c.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 1003.329680] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 1003.329927] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-866efa6a-2d3e-41f7-b6a9-1416a0f2d191 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1003.335802] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){
[ 1003.335802] env[61998]: value = "task-1388919"
[ 1003.335802] env[61998]: _type = "Task"
[ 1003.335802] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1003.342818] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388919, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1003.384299] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5259e556-6290-93af-87cb-1f2dc66bdf13, 'name': SearchDatastore_Task, 'duration_secs': 0.01669} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1003.385077] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77ca1f91-6231-448c-ae0a-a927ad3e549b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1003.389991] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){
[ 1003.389991] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5231d627-5be4-349a-d6a4-1347bdd820c9"
[ 1003.389991] env[61998]: _type = "Task"
[ 1003.389991] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1003.399934] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5231d627-5be4-349a-d6a4-1347bdd820c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1003.487629] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1003.487812] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1003.488058] env[61998]: DEBUG nova.network.neutron [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1003.597498] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1003.598090] env[61998]: DEBUG nova.compute.manager [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}}
[ 1003.600624] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.807s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1003.604111] env[61998]: INFO nova.compute.claims [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1003.714344] env[61998]: INFO nova.compute.manager [-] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Took 1.23 seconds to deallocate network for instance.
[ 1003.848154] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388919, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065787} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1003.848442] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 1003.849241] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43dc594-3601-454a-928f-a838b9b02aef {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1003.870833] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 4c41a59a-59d4-4abd-b173-118e759fc19c/4c41a59a-59d4-4abd-b173-118e759fc19c.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1003.871569] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dd4da1c-9a4e-43dd-82bb-926543adc613 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1003.894900] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){
[ 1003.894900] env[61998]: value = "task-1388920"
[ 1003.894900] env[61998]: _type = "Task"
[ 1003.894900] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1003.901414] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5231d627-5be4-349a-d6a4-1347bdd820c9, 'name': SearchDatastore_Task, 'duration_secs': 0.008465} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1003.901993] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1003.902245] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] f87f913f-9e6e-4d64-9fe1-0a1fc8564b46/a90c4a31-8bcc-48cf-ada7-7369ab14c460-rescue.vmdk. {{(pid=61998) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}}
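Editor's note: the spawn of instance 4c41a59a above follows the cached-image pattern: SearchDatastore_Task probes devstack-image-cache_base for the image VMDK, CopyVirtualDisk_Task clones it into the instance folder, and ExtendVirtualDisk_Task grows the copy to the flavor's root size (here 1048576 KB). A schematic outline under the same hypothetical session assumption as the earlier sketch; parameter handling is simplified and this is not Nova source.

def spawn_from_cached_image(session, ds_browser, disk_mgr, dc_ref,
                            cache_vmdk, instance_vmdk, root_kb):
    # 1. Does the cached VMDK already exist? (HostDatastoreBrowser)
    task = session.invoke_api(session.vim, "SearchDatastore_Task",
                              ds_browser, datastorePath=cache_vmdk)
    session.wait_for_task(task)
    # 2. Clone the cached disk for this instance. (VirtualDiskManager)
    task = session.invoke_api(session.vim, "CopyVirtualDisk_Task", disk_mgr,
                              sourceName=cache_vmdk, sourceDatacenter=dc_ref,
                              destName=instance_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(task)
    # 3. Grow the copy to the requested root disk size.
    task = session.invoke_api(session.vim, "ExtendVirtualDisk_Task", disk_mgr,
                              name=instance_vmdk, datacenter=dc_ref,
                              newCapacityKb=root_kb, eagerZero=False)
    session.wait_for_task(task)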
[ 1003.902484] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56fa070c-aa81-47a2-941e-46bc3fa325d0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1003.907092] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388920, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1003.911248] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){
[ 1003.911248] env[61998]: value = "task-1388921"
[ 1003.911248] env[61998]: _type = "Task"
[ 1003.911248] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1003.918868] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388921, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1003.920798] env[61998]: DEBUG nova.network.neutron [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance_info_cache with network_info: [{"id": "e6c7c097-27f3-40b7-b085-a8b5f170210d", "address": "fa:16:3e:34:5d:81", "network": {"id": "82a6d525-2bd0-474f-9773-75aa67c33d67", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1560241014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82b8854f80cf48628167fd6f678d7dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec903a9-d773-4d7c-a80c-c2533be346fb", "external-id": "nsx-vlan-transportzone-208", "segmentation_id": 208, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6c7c097-27", "ovs_interfaceid": "e6c7c097-27f3-40b7-b085-a8b5f170210d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1004.025561] env[61998]: WARNING nova.network.neutron [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] 9b8c99a8-8481-43b6-bb09-1739b4f749c3 already exists in list: networks containing: ['9b8c99a8-8481-43b6-bb09-1739b4f749c3']. ignoring it
[ 1004.025850] env[61998]: WARNING nova.network.neutron [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] 9b8c99a8-8481-43b6-bb09-1739b4f749c3 already exists in list: networks containing: ['9b8c99a8-8481-43b6-bb09-1739b4f749c3']. ignoring it
[ 1004.107133] env[61998]: DEBUG nova.compute.utils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1004.112999] env[61998]: DEBUG nova.compute.manager [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1004.114467] env[61998]: DEBUG nova.network.neutron [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 1004.179495] env[61998]: DEBUG nova.policy [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '744da696f7c64f62ae04195aa737fab4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c75c9b7c8d6b441d80fe512c37c88679', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}}
[ 1004.222018] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1004.408125] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388920, 'name': ReconfigVM_Task, 'duration_secs': 0.280941} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1004.408464] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 4c41a59a-59d4-4abd-b173-118e759fc19c/4c41a59a-59d4-4abd-b173-118e759fc19c.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1004.409218] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-056436d8-3713-46eb-8152-c861863fbd3f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.416890] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){
[ 1004.416890] env[61998]: value = "task-1388922"
[ 1004.416890] env[61998]: _type = "Task"
[ 1004.416890] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1004.425380] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Releasing lock "refresh_cache-101d9d29-24b4-4c4d-bf7a-70abfd200be9" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1004.425693] env[61998]: DEBUG nova.objects.instance [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lazy-loading 'migration_context' on Instance uuid 101d9d29-24b4-4c4d-bf7a-70abfd200be9 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1004.426841] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388921, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1004.434344] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388922, 'name': Rename_Task} progress is 10%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1004.612420] env[61998]: DEBUG nova.compute.manager [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}}
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1004.676467] env[61998]: DEBUG nova.network.neutron [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Successfully created port: 549530e4-6c8b-4352-908f-473a6496a484 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1004.715010] env[61998]: DEBUG nova.network.neutron [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updating instance_info_cache with network_info: [{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "address": "fa:16:3e:10:ca:da", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4cf5059-51", "ovs_interfaceid": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "80f81837-014a-44f6-a2eb-6c05b1d74801", "address": "fa:16:3e:86:0a:3b", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80f81837-01", "ovs_interfaceid": "80f81837-014a-44f6-a2eb-6c05b1d74801", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a1c20dc3-4d7b-4812-a552-f524de427e63", "address": "fa:16:3e:f1:f5:60", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1c20dc3-4d", "ovs_interfaceid": "a1c20dc3-4d7b-4812-a552-f524de427e63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.757785] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c67bb2-5b11-4f7d-af01-89ec1744a810 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.765556] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57577a32-4494-40a7-a078-1dc6a3b394e7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.794994] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325fd267-9751-4b8d-8cd3-438bfbd85565 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.801789] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7502f0-f3bf-4a96-ad20-48425f144142 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.814789] env[61998]: DEBUG nova.compute.provider_tree [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.923531] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388921, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80811} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.924173] env[61998]: INFO nova.virt.vmwareapi.ds_util [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] f87f913f-9e6e-4d64-9fe1-0a1fc8564b46/a90c4a31-8bcc-48cf-ada7-7369ab14c460-rescue.vmdk. 
[ 1004.924986] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91c1f89-336b-4353-a47a-13cd1cc65946 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.930487] env[61998]: DEBUG nova.objects.base [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Object Instance<101d9d29-24b4-4c4d-bf7a-70abfd200be9> lazy-loaded attributes: info_cache,migration_context {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1004.930777] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388922, 'name': Rename_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.931757] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1775d52c-0249-4d31-812f-c2a509a864ed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.619405] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.627032] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.627032] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.627032] env[61998]: DEBUG nova.scheduler.client.report [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1005.632026] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 
f87f913f-9e6e-4d64-9fe1-0a1fc8564b46/a90c4a31-8bcc-48cf-ada7-7369ab14c460-rescue.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.632921] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f6811d-f4f3-48fc-a931-c339fd4f1f31 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.638235] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8982d057-626d-4142-81ef-c958fac42e93 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.665025] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-459c3237-386f-427f-84cf-2b2fc853138c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.681637] env[61998]: DEBUG nova.virt.hardware [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1005.681868] env[61998]: DEBUG nova.virt.hardware [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1005.682038] env[61998]: DEBUG nova.virt.hardware [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1005.682231] env[61998]: DEBUG nova.virt.hardware [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1005.682381] env[61998]: DEBUG nova.virt.hardware [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1005.682529] env[61998]: DEBUG nova.virt.hardware [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1005.682733] env[61998]: DEBUG 
nova.virt.hardware [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1005.682895] env[61998]: DEBUG nova.virt.hardware [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1005.683081] env[61998]: DEBUG nova.virt.hardware [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1005.683251] env[61998]: DEBUG nova.virt.hardware [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1005.683426] env[61998]: DEBUG nova.virt.hardware [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1005.689540] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Reconfiguring VM to attach interface {{(pid=61998) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1005.690117] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388922, 'name': Rename_Task, 'duration_secs': 1.214673} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.691520] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f155bf9a-767d-4720-9b53-6f443e4df4a4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.703121] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1005.703417] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1005.703417] env[61998]: value = "task-1388923" [ 1005.703417] env[61998]: _type = "Task" [ 1005.703417] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.704754] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-878af9cf-f617-4705-abdb-ab43f4154c0a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.706300] env[61998]: DEBUG oslo_vmware.api [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 1005.706300] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52c9b9bc-a65d-b4f1-b168-d6b06e36abb7" [ 1005.706300] env[61998]: _type = "Task" [ 1005.706300] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.713649] env[61998]: DEBUG oslo_vmware.api [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1005.713649] env[61998]: value = "task-1388924" [ 1005.713649] env[61998]: _type = "Task" [ 1005.713649] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.714994] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1005.714994] env[61998]: value = "task-1388925" [ 1005.714994] env[61998]: _type = "Task" [ 1005.714994] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.720537] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388923, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.726019] env[61998]: DEBUG oslo_vmware.api [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c9b9bc-a65d-b4f1-b168-d6b06e36abb7, 'name': SearchDatastore_Task, 'duration_secs': 0.006132} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.726603] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.733634] env[61998]: DEBUG oslo_vmware.api [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388924, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.733866] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388925, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.133972] env[61998]: DEBUG nova.compute.manager [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1006.137095] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.536s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.137721] env[61998]: DEBUG nova.compute.manager [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1006.146027] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.919s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.146027] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.146027] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.417s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.168590] env[61998]: INFO nova.scheduler.client.report [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted allocations for instance 5789b2bc-a8c5-4986-bb53-7175cd566142 [ 1006.180348] env[61998]: DEBUG nova.virt.hardware [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1006.180816] env[61998]: DEBUG nova.virt.hardware [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1006.180816] env[61998]: DEBUG nova.virt.hardware [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.181097] env[61998]: DEBUG nova.virt.hardware [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1006.182247] env[61998]: DEBUG nova.virt.hardware [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1006.182247] env[61998]: DEBUG nova.virt.hardware [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1006.182247] env[61998]: DEBUG nova.virt.hardware [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1006.182247] env[61998]: DEBUG nova.virt.hardware [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1006.182247] env[61998]: DEBUG nova.virt.hardware [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1006.182247] env[61998]: DEBUG nova.virt.hardware [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] 
Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1006.182247] env[61998]: DEBUG nova.virt.hardware [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1006.183296] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d488f7eb-411b-4733-bc3f-0b370e56c4c8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.192100] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260572db-24c4-4701-9f3c-989384bdc6f4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.198012] env[61998]: DEBUG nova.network.neutron [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Successfully updated port: 549530e4-6c8b-4352-908f-473a6496a484 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1006.231425] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388923, 'name': ReconfigVM_Task, 'duration_secs': 0.321363} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.235252] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Reconfigured VM instance instance-0000005f to attach disk [datastore1] f87f913f-9e6e-4d64-9fe1-0a1fc8564b46/a90c4a31-8bcc-48cf-ada7-7369ab14c460-rescue.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1006.235975] env[61998]: DEBUG oslo_vmware.api [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388924, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.236813] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d580df-6f57-4615-8b8e-d2f7f3b9f2cb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.242736] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388925, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.266293] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb74b3a7-c244-4209-81e8-8aaa15d86d3f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.280233] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1006.280233] env[61998]: value = "task-1388926" [ 1006.280233] env[61998]: _type = "Task" [ 1006.280233] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.288204] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388926, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.647294] env[61998]: DEBUG nova.compute.utils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1006.652240] env[61998]: DEBUG nova.compute.manager [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1006.652402] env[61998]: DEBUG nova.network.neutron [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1006.681162] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7ca5842a-5a1b-471a-b0f3-c12f19907fde tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "5789b2bc-a8c5-4986-bb53-7175cd566142" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.328s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.690614] env[61998]: DEBUG nova.policy [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f155bbfca47547c2bf745811003ffcec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f49104f21d7147328bcc8edee8d3cdb2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 1006.700153] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "refresh_cache-a909297e-ac29-4630-a54b-abd0b6f67893" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.700153] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "refresh_cache-a909297e-ac29-4630-a54b-abd0b6f67893" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.700242] env[61998]: DEBUG nova.network.neutron [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1006.731803] env[61998]: DEBUG oslo_vmware.api [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388924, 'name': ReconfigVM_Task, 'duration_secs': 0.564352} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.732800] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.733035] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Reconfigured VM to attach interface {{(pid=61998) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1006.738892] env[61998]: DEBUG oslo_vmware.api [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1388925, 'name': PowerOnVM_Task, 'duration_secs': 0.537559} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.739371] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1006.739571] env[61998]: INFO nova.compute.manager [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Took 7.91 seconds to spawn the instance on the hypervisor. 
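The Rename_Task/PowerOnVM_Task records above follow oslo.vmware's invoke-then-poll pattern: the driver submits a *_Task method call, and wait_for_task blocks while the poller emits the "Task: {...} progress is N%" lines seen throughout this log. A hedged sketch of that pattern, with the endpoint, credentials, and vm_ref as placeholder assumptions:

    # Sketch of the oslo.vmware call pattern behind the task records above.
    # The vCenter endpoint and credentials are placeholders; vm_ref is
    # assumed to be a VirtualMachine managed-object reference obtained
    # elsewhere, so this is a pattern sketch rather than a runnable script.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',   # hypothetical vCenter
        api_retry_count=10, task_poll_interval=0.5)

    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)   # polls until done, logging task progress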
[ 1006.739755] env[61998]: DEBUG nova.compute.manager [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1006.740521] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92991792-dcea-45f1-bfa8-3033dde5e08b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.744954] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.755330] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811ab0a6-f3c3-45cd-ae04-f376bcd1950d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.762378] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf17c71-6447-4414-9b45-f2749342e482 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.797187] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22735025-19ac-4117-a2fa-ad12fdc0a02c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.805050] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388926, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.807972] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693de706-4d52-4938-aaa6-a5645be725f7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.821483] env[61998]: DEBUG nova.compute.provider_tree [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.928245] env[61998]: DEBUG nova.network.neutron [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Successfully created port: ba53e7e6-8720-4517-8d56-04a0c1784589 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1007.153552] env[61998]: DEBUG nova.compute.manager [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1007.241056] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9058f1d6-bd0e-4664-b603-849ec34b3f7a tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-58626303-4d70-48bb-9aaf-1b54cef92a76-a1c20dc3-4d7b-4812-a552-f524de427e63" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.968s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.250022] env[61998]: DEBUG nova.network.neutron [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1007.260423] env[61998]: INFO nova.compute.manager [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Took 12.68 seconds to build instance. [ 1007.303862] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388926, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.325235] env[61998]: DEBUG nova.scheduler.client.report [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1007.466683] env[61998]: DEBUG nova.network.neutron [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Updating instance_info_cache with network_info: [{"id": "549530e4-6c8b-4352-908f-473a6496a484", "address": "fa:16:3e:29:63:21", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap549530e4-6c", "ovs_interfaceid": "549530e4-6c8b-4352-908f-473a6496a484", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.762436] env[61998]: DEBUG oslo_concurrency.lockutils [None req-7710accf-4320-4fd4-980a-c201915b161d tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.189s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.803965] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388926, 'name': ReconfigVM_Task, 'duration_secs': 1.131368} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.804253] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1007.804502] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff35aca0-9be0-453a-8836-62c2ea304084 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.810293] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1007.810293] env[61998]: value = "task-1388927" [ 1007.810293] env[61998]: _type = "Task" [ 1007.810293] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.818412] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388927, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.970809] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "refresh_cache-a909297e-ac29-4630-a54b-abd0b6f67893" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.971081] env[61998]: DEBUG nova.compute.manager [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Instance network_info: |[{"id": "549530e4-6c8b-4352-908f-473a6496a484", "address": "fa:16:3e:29:63:21", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap549530e4-6c", "ovs_interfaceid": "549530e4-6c8b-4352-908f-473a6496a484", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1007.971559] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:63:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '549530e4-6c8b-4352-908f-473a6496a484', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.980621] env[61998]: DEBUG oslo.service.loopingcall [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1007.980621] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1007.980621] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a97e174-b757-44e0-9940-b346700330dc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.000159] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1008.000159] env[61998]: value = "task-1388928" [ 1008.000159] env[61998]: _type = "Task" [ 1008.000159] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.007836] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388928, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.165015] env[61998]: DEBUG nova.compute.manager [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1008.192233] env[61998]: DEBUG nova.virt.hardware [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1008.192521] env[61998]: DEBUG nova.virt.hardware [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1008.192682] env[61998]: DEBUG nova.virt.hardware [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1008.192882] env[61998]: DEBUG nova.virt.hardware [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1008.193043] env[61998]: DEBUG 
nova.virt.hardware [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1008.193221] env[61998]: DEBUG nova.virt.hardware [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1008.193534] env[61998]: DEBUG nova.virt.hardware [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1008.193710] env[61998]: DEBUG nova.virt.hardware [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1008.193912] env[61998]: DEBUG nova.virt.hardware [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1008.194103] env[61998]: DEBUG nova.virt.hardware [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1008.194297] env[61998]: DEBUG nova.virt.hardware [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1008.195181] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e491346a-13f1-4853-8fee-b7a2a7ccdd45 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.203599] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6dbe16-f8e9-447c-8876-b4fe0ae5b5a4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.320456] env[61998]: DEBUG oslo_vmware.api [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388927, 'name': PowerOnVM_Task, 'duration_secs': 0.454235} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.320879] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1008.323746] env[61998]: DEBUG nova.compute.manager [None req-f2342896-0d6a-44ea-8054-6e40d4e42813 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1008.324541] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d11d2f6-5f8d-4dc1-b82d-38f37faccbfb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.335481] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.192s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.344224] env[61998]: DEBUG nova.compute.manager [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Received event network-vif-deleted-755f7df1-f8ab-44a0-92fd-7ffcdc053632 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1008.344393] env[61998]: DEBUG nova.compute.manager [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received event network-vif-plugged-a1c20dc3-4d7b-4812-a552-f524de427e63 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1008.344626] env[61998]: DEBUG oslo_concurrency.lockutils [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] Acquiring lock "58626303-4d70-48bb-9aaf-1b54cef92a76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.344791] env[61998]: DEBUG oslo_concurrency.lockutils [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] Lock "58626303-4d70-48bb-9aaf-1b54cef92a76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.344957] env[61998]: DEBUG oslo_concurrency.lockutils [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] Lock "58626303-4d70-48bb-9aaf-1b54cef92a76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.345142] env[61998]: DEBUG nova.compute.manager [req-01d3f5b1-b610-4276-b3f7-79879d63da4c 
req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] No waiting events found dispatching network-vif-plugged-a1c20dc3-4d7b-4812-a552-f524de427e63 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1008.345313] env[61998]: WARNING nova.compute.manager [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received unexpected event network-vif-plugged-a1c20dc3-4d7b-4812-a552-f524de427e63 for instance with vm_state active and task_state None. [ 1008.345476] env[61998]: DEBUG nova.compute.manager [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received event network-changed-a1c20dc3-4d7b-4812-a552-f524de427e63 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1008.345634] env[61998]: DEBUG nova.compute.manager [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Refreshing instance network info cache due to event network-changed-a1c20dc3-4d7b-4812-a552-f524de427e63. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1008.345818] env[61998]: DEBUG oslo_concurrency.lockutils [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] Acquiring lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.345951] env[61998]: DEBUG oslo_concurrency.lockutils [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] Acquired lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.346221] env[61998]: DEBUG nova.network.neutron [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Refreshing network info cache for port a1c20dc3-4d7b-4812-a552-f524de427e63 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1008.510391] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388928, 'name': CreateVM_Task, 'duration_secs': 0.411233} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.510562] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1008.511259] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.511430] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.511762] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1008.512029] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28e34a44-d40c-4548-ab24-79d7477f9681 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.516752] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 1008.516752] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52be8913-c90e-33a7-0ea7-68e20f9e0fcf" [ 1008.516752] env[61998]: _type = "Task" [ 1008.516752] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.525162] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52be8913-c90e-33a7-0ea7-68e20f9e0fcf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.903476] env[61998]: INFO nova.scheduler.client.report [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleted allocation for migration 0a805f44-1861-4bad-a52c-51f18cb0e67c [ 1009.035597] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52be8913-c90e-33a7-0ea7-68e20f9e0fcf, 'name': SearchDatastore_Task, 'duration_secs': 0.009516} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.036019] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.036360] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1009.036701] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.036927] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.037224] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1009.037581] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1ea2a37-425c-40e5-9255-c1b106f3e7d2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.046399] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1009.046614] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Folder [datastore2] devstack-image-cache_base created. 
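
The Acquiring/Acquired/Releasing lock lines threaded through these entries (lockutils.py:310/313/331, and the "inner" wrapper at 402/407/421) are emitted by oslo.concurrency. Application code typically produces them with either the lock() context manager or the synchronized() decorator; a minimal sketch of both forms, with only the lock name taken from this log:

from oslo_concurrency import lockutils

# Context-manager form: logs "Acquiring lock ..." / "Acquired lock ..." on entry
# and "Releasing lock ..." on exit, like the datastore image-cache locks above.
with lockutils.lock("compute_resources"):
    pass  # critical section

# Decorator form: logs the 'acquired by "..."' / '"released" by "..."' pairs
# with waited/held timings, as seen for the ResourceTracker methods here.
@lockutils.synchronized("compute_resources")
def drop_move_claim_at_source():
    pass  # runs with the lock held

drop_move_claim_at_source()
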
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1009.047645] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea9d8915-31a0-4f9a-83fa-1544cc3263c6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.054223] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 1009.054223] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]523a1cc1-1e89-6a02-e6c3-583cb50eed3b" [ 1009.054223] env[61998]: _type = "Task" [ 1009.054223] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.064324] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523a1cc1-1e89-6a02-e6c3-583cb50eed3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.161197] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "interface-58626303-4d70-48bb-9aaf-1b54cef92a76-80f81837-014a-44f6-a2eb-6c05b1d74801" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.161510] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-58626303-4d70-48bb-9aaf-1b54cef92a76-80f81837-014a-44f6-a2eb-6c05b1d74801" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.220554] env[61998]: DEBUG nova.network.neutron [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Successfully updated port: ba53e7e6-8720-4517-8d56-04a0c1784589 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1009.384661] env[61998]: DEBUG nova.network.neutron [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updated VIF entry in instance network info cache for port a1c20dc3-4d7b-4812-a552-f524de427e63. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1009.385013] env[61998]: DEBUG nova.network.neutron [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updating instance_info_cache with network_info: [{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "address": "fa:16:3e:10:ca:da", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4cf5059-51", "ovs_interfaceid": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "80f81837-014a-44f6-a2eb-6c05b1d74801", "address": "fa:16:3e:86:0a:3b", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80f81837-01", "ovs_interfaceid": "80f81837-014a-44f6-a2eb-6c05b1d74801", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a1c20dc3-4d7b-4812-a552-f524de427e63", "address": "fa:16:3e:f1:f5:60", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1c20dc3-4d", "ovs_interfaceid": "a1c20dc3-4d7b-4812-a552-f524de427e63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.408807] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.256s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.565011] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523a1cc1-1e89-6a02-e6c3-583cb50eed3b, 'name': SearchDatastore_Task, 'duration_secs': 0.008363} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.565968] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4961d9e-b30f-416c-8161-24a2dde10682 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.571147] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 1009.571147] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5287cb4f-75c3-7eac-a4f6-e85be79b40a0" [ 1009.571147] env[61998]: _type = "Task" [ 1009.571147] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.579139] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5287cb4f-75c3-7eac-a4f6-e85be79b40a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.663961] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.664174] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.665193] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c86f4a-75cb-43a3-ae60-d704a48ae7d4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.683396] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1d3988-5487-4d78-9286-4c73ba69c40a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.711374] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Reconfiguring VM to detach interface {{(pid=61998) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1009.711662] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed4de2d1-f518-43e3-9317-e0474d22facd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.724836] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "refresh_cache-eb45dbc3-a972-4004-9c9a-9bd908b34723" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.724973] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "refresh_cache-eb45dbc3-a972-4004-9c9a-9bd908b34723" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.725132] env[61998]: DEBUG nova.network.neutron [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1009.731990] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1009.731990] env[61998]: value = "task-1388929" [ 1009.731990] env[61998]: _type = "Task" [ 1009.731990] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.740361] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.742858] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1009.888172] env[61998]: DEBUG oslo_concurrency.lockutils [req-01d3f5b1-b610-4276-b3f7-79879d63da4c req-eaf756bc-e129-459d-a671-3fc7d51e2efa service nova] Releasing lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.081477] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5287cb4f-75c3-7eac-a4f6-e85be79b40a0, 'name': SearchDatastore_Task, 'duration_secs': 0.009347} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.081850] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.081980] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] a909297e-ac29-4630-a54b-abd0b6f67893/a909297e-ac29-4630-a54b-abd0b6f67893.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1010.082253] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6da8f25-9aa5-4dc8-993a-a18a21ee4cf8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.088919] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 1010.088919] env[61998]: value = "task-1388930" [ 1010.088919] env[61998]: _type = "Task" [ 1010.088919] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.096076] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388930, 'name': CopyVirtualDisk_Task} progress is 0%. 
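
Each "Invoking X.Y_Task" / "Waiting for the task" / "progress is N%" / "completed successfully" sequence in this log is oslo.vmware's request-and-poll cycle: invoke_api() issues the SOAP call and returns a task reference, and wait_for_task() polls it until it succeeds or errors. A sketch of that calling pattern for a disk copy like task-1388930 above; the host, credentials, and datastore paths are placeholders, not values from this log:

from oslo_vmware import api

# Placeholder connection details for illustration only.
session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# Issues "Invoking VirtualDiskManager.CopyVirtualDisk_Task" and returns a task ref.
task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                          session.vim.service_content.virtualDiskManager,
                          sourceName='[datastore2] some-cache/src.vmdk',
                          destName='[datastore2] some-instance/dst.vmdk')

# Polls the task ("progress is N%") until it reaches success,
# then returns the final task info ("completed successfully").
task_info = session.wait_for_task(task)
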
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.240860] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.245871] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.246094] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.246299] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.246399] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61998) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1010.247555] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deab7f37-e77d-4c1e-9ccb-0e3d8ea67b49 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.255563] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce443f5-1b6d-4425-8969-ff3de4bd78ed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.272161] env[61998]: DEBUG nova.network.neutron [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Instance cache missing network info. 
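
The "Running periodic task ComputeManager.update_available_resource" entry, and the resource-audit lines that follow from it here, are driven by oslo.service's periodic-task framework: methods registered with the decorator are fired by run_periodic_tasks() on a timer. A minimal sketch of the registration pattern (the class and method body are illustrative, not Nova's):

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class Manager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    # Fired on a ~60s spacing; the framework logs
    # "Running periodic task Manager.update_available_resource" each run.
    @periodic_task.periodic_task(spacing=60)
    def update_available_resource(self, context):
        pass  # e.g. audit hypervisor resources under the "compute_resources" lock

mgr = Manager()
mgr.run_periodic_tasks(context=None)
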
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1010.275010] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a056e91-7465-4d41-bb30-6c78e8f1b565 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.282423] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560078da-89d3-4351-8e55-dbffa013720a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.313676] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180693MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61998) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1010.313883] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.314113] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.365977] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.366281] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.378813] env[61998]: DEBUG nova.compute.manager [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Received event network-vif-plugged-549530e4-6c8b-4352-908f-473a6496a484 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1010.379047] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Acquiring lock "a909297e-ac29-4630-a54b-abd0b6f67893-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.379308] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Lock 
"a909297e-ac29-4630-a54b-abd0b6f67893-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.379514] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Lock "a909297e-ac29-4630-a54b-abd0b6f67893-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.379690] env[61998]: DEBUG nova.compute.manager [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] No waiting events found dispatching network-vif-plugged-549530e4-6c8b-4352-908f-473a6496a484 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1010.382021] env[61998]: WARNING nova.compute.manager [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Received unexpected event network-vif-plugged-549530e4-6c8b-4352-908f-473a6496a484 for instance with vm_state building and task_state spawning. [ 1010.382021] env[61998]: DEBUG nova.compute.manager [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Received event network-changed-549530e4-6c8b-4352-908f-473a6496a484 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1010.382021] env[61998]: DEBUG nova.compute.manager [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Refreshing instance network info cache due to event network-changed-549530e4-6c8b-4352-908f-473a6496a484. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1010.382021] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Acquiring lock "refresh_cache-a909297e-ac29-4630-a54b-abd0b6f67893" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.382021] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Acquired lock "refresh_cache-a909297e-ac29-4630-a54b-abd0b6f67893" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.382021] env[61998]: DEBUG nova.network.neutron [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Refreshing network info cache for port 549530e4-6c8b-4352-908f-473a6496a484 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1010.526761] env[61998]: DEBUG nova.network.neutron [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Updating instance_info_cache with network_info: [{"id": "ba53e7e6-8720-4517-8d56-04a0c1784589", "address": "fa:16:3e:c1:6a:35", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba53e7e6-87", "ovs_interfaceid": "ba53e7e6-8720-4517-8d56-04a0c1784589", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.599980] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388930, 'name': CopyVirtualDisk_Task} progress is 51%. 
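
The external_instance_event / pop_instance_event entries above implement Neutron-to-Nova signalling: a compute thread registers a waiter for an event such as network-vif-plugged, and the dispatch path either wakes it or, as in the WARNING here, finds no waiter because the event arrived outside a spawn. A deliberately simplified, hypothetical sketch of such a waiter registry — not Nova's InstanceEvents code:

import threading

_waiters = {}          # (instance_uuid, event_name) -> threading.Event
_registry_lock = threading.Lock()

def prepare_for_event(instance_uuid, event_name):
    """Called before the operation that will trigger the event."""
    ev = threading.Event()
    with _registry_lock:
        _waiters[(instance_uuid, event_name)] = ev
    return ev  # caller blocks on ev.wait(timeout=...)

def dispatch_event(instance_uuid, event_name):
    """Called when the external event arrives from Neutron."""
    with _registry_lock:
        ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        # Analogue of the WARNING above: event arrived with no waiter.
        print(f"unexpected event {event_name} for {instance_uuid}")
    else:
        ev.set()
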
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.682656] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.682956] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.683194] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.683407] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.683596] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.685951] env[61998]: INFO nova.compute.manager [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Terminating instance [ 1010.687979] env[61998]: DEBUG nova.compute.manager [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 1010.688269] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1010.689131] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db564d4-0b4b-46de-893f-2a6312debec9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.696571] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1010.696769] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12863e8b-8eb9-4379-b45c-a51a878d6cd8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.703110] env[61998]: DEBUG oslo_vmware.api [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 1010.703110] env[61998]: value = "task-1388931" [ 1010.703110] env[61998]: _type = "Task" [ 1010.703110] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.712270] env[61998]: DEBUG oslo_vmware.api [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388931, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.742037] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.869092] env[61998]: DEBUG nova.compute.manager [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1011.030012] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "refresh_cache-eb45dbc3-a972-4004-9c9a-9bd908b34723" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.030476] env[61998]: DEBUG nova.compute.manager [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Instance network_info: |[{"id": "ba53e7e6-8720-4517-8d56-04a0c1784589", "address": "fa:16:3e:c1:6a:35", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba53e7e6-87", "ovs_interfaceid": "ba53e7e6-8720-4517-8d56-04a0c1784589", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1011.030861] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:6a:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba53e7e6-8720-4517-8d56-04a0c1784589', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1011.038592] env[61998]: DEBUG oslo.service.loopingcall [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
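
The loopingcall.py frames on the "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entries come from oslo.service's looping-call helpers, which Nova uses to run or retry a function on an interval until it signals completion. A generic FixedIntervalLoopingCall sketch from the same helper family (not necessarily the exact wrapper used here):

from oslo_service import loopingcall

attempts = {"n": 0}

def poll():
    attempts["n"] += 1
    if attempts["n"] >= 3:
        # Stops the loop and makes wait() return this value.
        raise loopingcall.LoopingCallDone(retvalue="done")

timer = loopingcall.FixedIntervalLoopingCall(poll)
result = timer.start(interval=0.1).wait()  # -> "done" after three runs
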
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.039168] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1011.039459] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-63813a07-97a8-408c-b029-8edcf62cb164 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.062266] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1011.062266] env[61998]: value = "task-1388932" [ 1011.062266] env[61998]: _type = "Task" [ 1011.062266] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.069928] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388932, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.089423] env[61998]: DEBUG nova.network.neutron [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Updated VIF entry in instance network info cache for port 549530e4-6c8b-4352-908f-473a6496a484. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1011.089831] env[61998]: DEBUG nova.network.neutron [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Updating instance_info_cache with network_info: [{"id": "549530e4-6c8b-4352-908f-473a6496a484", "address": "fa:16:3e:29:63:21", "network": {"id": "1900169c-e6b9-4bf2-96ba-783ef56dc920", "bridge": "br-int", "label": "tempest-ServersTestJSON-440251278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c75c9b7c8d6b441d80fe512c37c88679", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap549530e4-6c", "ovs_interfaceid": "549530e4-6c8b-4352-908f-473a6496a484", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.099854] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388930, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.621732} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.100150] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] a909297e-ac29-4630-a54b-abd0b6f67893/a909297e-ac29-4630-a54b-abd0b6f67893.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1011.100372] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1011.100637] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0280ee1d-d5c0-4fcc-abbb-c47dddb51ae1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.108048] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 1011.108048] env[61998]: value = "task-1388933" [ 1011.108048] env[61998]: _type = "Task" [ 1011.108048] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.116949] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388933, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.213268] env[61998]: DEBUG oslo_vmware.api [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388931, 'name': PowerOffVM_Task, 'duration_secs': 0.185664} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.213516] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1011.213689] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1011.213938] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce1d08a6-821f-4f99-bc06-a9b720c8a809 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.242618] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.284022] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1011.284273] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1011.284464] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleting the datastore file [datastore1] 101d9d29-24b4-4c4d-bf7a-70abfd200be9 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1011.284738] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3711b149-8712-4da5-a944-f877f99e865c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.292290] env[61998]: DEBUG oslo_vmware.api [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for the task: (returnval){ [ 1011.292290] env[61998]: value = "task-1388935" [ 1011.292290] env[61998]: _type = "Task" [ 1011.292290] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.300113] env[61998]: DEBUG oslo_vmware.api [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388935, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.343451] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 58626303-4d70-48bb-9aaf-1b54cef92a76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.343613] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 101d9d29-24b4-4c4d-bf7a-70abfd200be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.343739] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance f87f913f-9e6e-4d64-9fe1-0a1fc8564b46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.343858] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 4c41a59a-59d4-4abd-b173-118e759fc19c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.343971] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance a909297e-ac29-4630-a54b-abd0b6f67893 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.344100] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance eb45dbc3-a972-4004-9c9a-9bd908b34723 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.388998] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.571692] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388932, 'name': CreateVM_Task, 'duration_secs': 0.425776} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.571868] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1011.572573] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.572744] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.573078] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1011.573333] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5d3951c-6778-4600-807f-8a2981945e18 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.577601] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 1011.577601] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]523a0715-b1d5-87cf-b24f-143fc1f0a4ce" [ 1011.577601] env[61998]: _type = "Task" [ 1011.577601] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.584860] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523a0715-b1d5-87cf-b24f-143fc1f0a4ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.595466] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Releasing lock "refresh_cache-a909297e-ac29-4630-a54b-abd0b6f67893" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.595712] env[61998]: DEBUG nova.compute.manager [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Received event network-vif-plugged-ba53e7e6-8720-4517-8d56-04a0c1784589 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1011.595906] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Acquiring lock "eb45dbc3-a972-4004-9c9a-9bd908b34723-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.596124] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Lock "eb45dbc3-a972-4004-9c9a-9bd908b34723-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.596294] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Lock "eb45dbc3-a972-4004-9c9a-9bd908b34723-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.596467] env[61998]: DEBUG nova.compute.manager [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] No waiting events found dispatching network-vif-plugged-ba53e7e6-8720-4517-8d56-04a0c1784589 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1011.596637] env[61998]: WARNING nova.compute.manager [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Received unexpected event network-vif-plugged-ba53e7e6-8720-4517-8d56-04a0c1784589 for instance with vm_state building and task_state spawning. [ 1011.596804] env[61998]: DEBUG nova.compute.manager [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Received event network-changed-ba53e7e6-8720-4517-8d56-04a0c1784589 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1011.596961] env[61998]: DEBUG nova.compute.manager [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Refreshing instance network info cache due to event network-changed-ba53e7e6-8720-4517-8d56-04a0c1784589. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1011.597162] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Acquiring lock "refresh_cache-eb45dbc3-a972-4004-9c9a-9bd908b34723" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.597303] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Acquired lock "refresh_cache-eb45dbc3-a972-4004-9c9a-9bd908b34723" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.597463] env[61998]: DEBUG nova.network.neutron [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Refreshing network info cache for port ba53e7e6-8720-4517-8d56-04a0c1784589 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1011.617058] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388933, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067685} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.617309] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1011.618102] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e59607c-c53d-422f-b553-298ee4389aed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.647854] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] a909297e-ac29-4630-a54b-abd0b6f67893/a909297e-ac29-4630-a54b-abd0b6f67893.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1011.648617] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1c0e74e-3ae3-4230-885d-cde488cc950e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.675697] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 1011.675697] env[61998]: value = "task-1388936" [ 1011.675697] env[61998]: _type = "Task" [ 1011.675697] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.683378] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388936, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.742878] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.801749] env[61998]: DEBUG oslo_vmware.api [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388935, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.846738] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1011.846983] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1011.847157] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1011.938055] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06cb683-e1eb-4483-b713-95d990744909 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.946090] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b871a0-b41b-43be-b6d0-c9546d0246a0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.977091] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b0313f-97b3-470c-b4b3-91588c872e74 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.984364] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa530bc9-99c5-47d8-8362-1f36fc69b01d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.997378] env[61998]: DEBUG nova.compute.provider_tree [None 
req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.087828] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523a0715-b1d5-87cf-b24f-143fc1f0a4ce, 'name': SearchDatastore_Task, 'duration_secs': 0.04675} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.088141] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.088389] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1012.088649] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.088803] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.088984] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1012.089248] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a53d92ae-6422-4320-8183-55f659b360de {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.102230] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1012.102544] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 
tempest-ServerDiskConfigTestJSON-347226950-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1012.103855] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dececbd8-8645-4122-a23d-50c45cff74b7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.108537] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 1012.108537] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52b6318a-626b-493c-592f-43c17abf01cf" [ 1012.108537] env[61998]: _type = "Task" [ 1012.108537] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.115635] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b6318a-626b-493c-592f-43c17abf01cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.187819] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388936, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.243603] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.289545] env[61998]: DEBUG nova.network.neutron [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Updated VIF entry in instance network info cache for port ba53e7e6-8720-4517-8d56-04a0c1784589. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1012.289962] env[61998]: DEBUG nova.network.neutron [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Updating instance_info_cache with network_info: [{"id": "ba53e7e6-8720-4517-8d56-04a0c1784589", "address": "fa:16:3e:c1:6a:35", "network": {"id": "ae406e3d-2e15-4091-b840-925c362fffb1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1520273470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f49104f21d7147328bcc8edee8d3cdb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba53e7e6-87", "ovs_interfaceid": "ba53e7e6-8720-4517-8d56-04a0c1784589", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.302917] env[61998]: DEBUG oslo_vmware.api [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Task: {'id': task-1388935, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.713833} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.303178] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.303362] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1012.303541] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1012.303715] env[61998]: INFO nova.compute.manager [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Took 1.62 seconds to destroy the instance on the hypervisor. 
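Note: the records above trace the standard vmwareapi destroy path for instance 101d9d29-24b4-4c4d-bf7a-70abfd200be9 — PowerOffVM_Task, then VirtualMachine.UnregisterVM, then FileManager.DeleteDatastoreFile_Task — with every returned task handle polled through wait_for_task/_poll_task until vCenter reports completion. A minimal sketch of that poll-until-complete loop follows; it is an illustration only, not oslo.vmware's actual code, and get_task_info is a hypothetical stand-in for the PropertyCollector read that _poll_task performs:

    import time

    POLL_INTERVAL = 0.5  # assumed interval; oslo.vmware drives polling from a looping call

    def wait_for_task(session, task_ref):
        """Block until the vCenter task succeeds, raising on error."""
        while True:
            info = session.get_task_info(task_ref)  # hypothetical accessor
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error_msg)
            # 'queued' or 'running': the progress lines above are emitted here,
            # e.g. "Task: {'id': task-1388935, ...} progress is 0%."
            time.sleep(POLL_INTERVAL)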
[ 1012.303950] env[61998]: DEBUG oslo.service.loopingcall [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.304157] env[61998]: DEBUG nova.compute.manager [-] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1012.304251] env[61998]: DEBUG nova.network.neutron [-] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1012.500954] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1012.526315] env[61998]: DEBUG nova.compute.manager [req-a311fbe2-f9e7-4b7d-9c89-6a51000c0ffb req-723030f5-be32-4f07-8b5d-bdd860453d62 service nova] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Received event network-vif-deleted-e6c7c097-27f3-40b7-b085-a8b5f170210d {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1012.526740] env[61998]: INFO nova.compute.manager [req-a311fbe2-f9e7-4b7d-9c89-6a51000c0ffb req-723030f5-be32-4f07-8b5d-bdd860453d62 service nova] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Neutron deleted interface e6c7c097-27f3-40b7-b085-a8b5f170210d; detaching it from the instance and deleting it from the info cache [ 1012.526740] env[61998]: DEBUG nova.network.neutron [req-a311fbe2-f9e7-4b7d-9c89-6a51000c0ffb req-723030f5-be32-4f07-8b5d-bdd860453d62 service nova] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.618890] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b6318a-626b-493c-592f-43c17abf01cf, 'name': SearchDatastore_Task, 'duration_secs': 0.034147} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.619710] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d223dbb2-8e88-4717-a31e-6c4a1a24befe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.624881] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 1012.624881] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]526bae02-d349-6707-fc6e-fad7694b74f7" [ 1012.624881] env[61998]: _type = "Task" [ 1012.624881] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.632499] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526bae02-d349-6707-fc6e-fad7694b74f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.685441] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388936, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.744544] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.792803] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Releasing lock "refresh_cache-eb45dbc3-a972-4004-9c9a-9bd908b34723" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.793014] env[61998]: DEBUG nova.compute.manager [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Received event network-changed-33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1012.793204] env[61998]: DEBUG nova.compute.manager [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Refreshing instance network info cache due to event network-changed-33446d72-d352-428e-8a03-c36aaa61c776. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1012.793421] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Acquiring lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.793571] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Acquired lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.793738] env[61998]: DEBUG nova.network.neutron [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Refreshing network info cache for port 33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1013.008315] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61998) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1013.008617] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.694s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.008880] env[61998]: DEBUG nova.network.neutron [-] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.010260] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.621s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.011813] env[61998]: INFO nova.compute.claims [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1013.028474] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45960952-2baa-4735-9f57-6c153f708a54 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.037976] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5204f9-8a77-45d2-9558-7368e7ad8ed6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.063636] env[61998]: DEBUG nova.compute.manager [req-a311fbe2-f9e7-4b7d-9c89-6a51000c0ffb req-723030f5-be32-4f07-8b5d-bdd860453d62 service nova] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Detach interface failed, 
port_id=e6c7c097-27f3-40b7-b085-a8b5f170210d, reason: Instance 101d9d29-24b4-4c4d-bf7a-70abfd200be9 could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 1013.134643] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]526bae02-d349-6707-fc6e-fad7694b74f7, 'name': SearchDatastore_Task, 'duration_secs': 0.008392} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.134985] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.135183] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] eb45dbc3-a972-4004-9c9a-9bd908b34723/eb45dbc3-a972-4004-9c9a-9bd908b34723.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1013.135469] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08afd078-57ce-43a7-af3a-f791ae6e91de {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.142677] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 1013.142677] env[61998]: value = "task-1388937" [ 1013.142677] env[61998]: _type = "Task" [ 1013.142677] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.150176] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388937, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.186643] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388936, 'name': ReconfigVM_Task, 'duration_secs': 1.025049} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.186911] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Reconfigured VM instance instance-00000061 to attach disk [datastore2] a909297e-ac29-4630-a54b-abd0b6f67893/a909297e-ac29-4630-a54b-abd0b6f67893.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1013.187545] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8e4a4615-93f7-4822-8765-5aaf6f05d96e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.194131] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 1013.194131] env[61998]: value = "task-1388938" [ 1013.194131] env[61998]: _type = "Task" [ 1013.194131] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.202613] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388938, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.245870] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.515391] env[61998]: INFO nova.compute.manager [-] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Took 1.21 seconds to deallocate network for instance. [ 1013.550914] env[61998]: DEBUG nova.network.neutron [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updated VIF entry in instance network info cache for port 33446d72-d352-428e-8a03-c36aaa61c776. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1013.551587] env[61998]: DEBUG nova.network.neutron [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updating instance_info_cache with network_info: [{"id": "33446d72-d352-428e-8a03-c36aaa61c776", "address": "fa:16:3e:fe:86:df", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33446d72-d3", "ovs_interfaceid": "33446d72-d352-428e-8a03-c36aaa61c776", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.653529] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388937, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448164} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.653864] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] eb45dbc3-a972-4004-9c9a-9bd908b34723/eb45dbc3-a972-4004-9c9a-9bd908b34723.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.654140] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.654435] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-edf785a3-e6fb-4e0e-b106-fc3c9d542440 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.660809] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 1013.660809] env[61998]: value = "task-1388939" [ 1013.660809] env[61998]: _type = "Task" [ 1013.660809] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.668628] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388939, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.703532] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388938, 'name': Rename_Task, 'duration_secs': 0.291681} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.703799] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1013.704063] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60d7d82c-86e5-4d21-ad1d-5004d7118a08 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.709841] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 1013.709841] env[61998]: value = "task-1388940" [ 1013.709841] env[61998]: _type = "Task" [ 1013.709841] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.717680] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388940, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.746574] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.014854] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1014.015126] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1014.015281] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Starting heal instance info cache {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10227}} [ 1014.015403] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Rebuilding the list of instances to heal {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10231}} [ 1014.026112] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.054247] env[61998]: DEBUG oslo_concurrency.lockutils [req-ff5aa437-2ed3-4dc7-b529-7702e4fa6b12 req-5c92bb9d-1f16-46d3-aa98-37438b0a8606 service nova] Releasing lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.116705] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2f3ea2-a7ad-4dcd-9d9b-b480e494d2aa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.124492] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef82a86f-8888-46e2-9f05-1cb2981d9b8c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.155159] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0677e74f-88f9-400b-846f-5289f454e7c6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.164970] env[61998]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a819124a-70a2-4360-beba-017513cd5854 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.173325] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388939, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061504} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.180779] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1014.181268] env[61998]: DEBUG nova.compute.provider_tree [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.182927] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb3ee1c-e39e-457b-aaef-347689961638 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.204458] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] eb45dbc3-a972-4004-9c9a-9bd908b34723/eb45dbc3-a972-4004-9c9a-9bd908b34723.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.205195] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b2de9ab-21fc-4981-952d-107dd574ca09 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.227508] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388940, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.228720] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 1014.228720] env[61998]: value = "task-1388941" [ 1014.228720] env[61998]: _type = "Task" [ 1014.228720] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.235985] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388941, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.245531] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.519442] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}} [ 1014.519647] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}} [ 1014.519746] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Skipping network cache update for instance because it is Building. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10240}} [ 1014.558720] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.558872] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquired lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.559034] env[61998]: DEBUG nova.network.neutron [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Forcefully refreshing network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1014.559197] env[61998]: DEBUG nova.objects.instance [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lazy-loading 'info_cache' on Instance uuid 58626303-4d70-48bb-9aaf-1b54cef92a76 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.686452] env[61998]: DEBUG nova.scheduler.client.report [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1014.731466] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388940, 'name': PowerOnVM_Task} 
progress is 89%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.740166] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388941, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.749270] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.195008] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.185s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.195566] env[61998]: DEBUG nova.compute.manager [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1015.198155] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.172s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.198439] env[61998]: DEBUG nova.objects.instance [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lazy-loading 'resources' on Instance uuid 101d9d29-24b4-4c4d-bf7a-70abfd200be9 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.229880] env[61998]: DEBUG oslo_vmware.api [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388940, 'name': PowerOnVM_Task, 'duration_secs': 1.135859} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.230167] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1015.230382] env[61998]: INFO nova.compute.manager [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Took 9.10 seconds to spawn the instance on the hypervisor. 
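Note: taken together, the task-1388933/1388936/1388938/1388940 records show one complete spawn for instance a909297e-ac29-4630-a54b-abd0b6f67893: copy the cached image VMDK, extend the root disk, reconfigure the VM to attach it, rename it, then power it on — 9.10 seconds end to end, each step gated on the previous task. A condensed sketch of that ordering is below; the lambdas are stubs standing in for the real vm_util/volumeops helpers, not Nova's implementation:

    # Ordering of the vCenter tasks a spawn issues, per the log above.
    # Each step blocks on its task before the next one is submitted.
    SPAWN_STEPS = [
        ("CopyVirtualDisk_Task", lambda: None),    # image-cache vmdk -> instance vmdk
        ("ExtendVirtualDisk_Task", lambda: None),  # grow the root disk to flavor size
        ("ReconfigVM_Task", lambda: None),         # attach the vmdk to the VM
        ("Rename_Task", lambda: None),             # rename the VM to the instance UUID
        ("PowerOnVM_Task", lambda: None),          # boot the guest
    ]

    def spawn(steps=SPAWN_STEPS):
        for name, run in steps:
            run()
            print(f"{name} completed successfully")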
[ 1015.230568] env[61998]: DEBUG nova.compute.manager [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1015.231929] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158e5e41-2367-4765-812e-3a5d53e7931d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.249831] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388941, 'name': ReconfigVM_Task, 'duration_secs': 0.70307} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.250368] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Reconfigured VM instance instance-00000062 to attach disk [datastore2] eb45dbc3-a972-4004-9c9a-9bd908b34723/eb45dbc3-a972-4004-9c9a-9bd908b34723.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.251317] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90510794-0d27-475f-91f4-825e1117091d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.256217] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.260187] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 1015.260187] env[61998]: value = "task-1388942" [ 1015.260187] env[61998]: _type = "Task" [ 1015.260187] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.268491] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388942, 'name': Rename_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.702376] env[61998]: DEBUG nova.compute.utils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1015.704975] env[61998]: DEBUG nova.compute.manager [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1015.704975] env[61998]: DEBUG nova.network.neutron [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1015.751450] env[61998]: DEBUG nova.policy [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b474e6789a884ad8bef3e98832168ff2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2b8a39c23bc46008370ed877054464e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 1015.766983] env[61998]: INFO nova.compute.manager [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Took 14.38 seconds to build instance. [ 1015.778695] env[61998]: DEBUG oslo_vmware.api [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388929, 'name': ReconfigVM_Task, 'duration_secs': 5.821242} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.783295] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.783680] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Reconfigured VM to detach interface {{(pid=61998) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1015.793255] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388942, 'name': Rename_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.846728] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06927809-885a-4eea-ab14-bb0ea1f6b52d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.855037] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b794e1-f7eb-4fe4-b090-963910931a6f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.888948] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e27e7b-db16-4bf5-a934-367dae05e6ea {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.897053] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7471576d-9a1d-4d46-b568-de2d6586b5fe {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.912275] env[61998]: DEBUG nova.compute.provider_tree [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.212023] env[61998]: DEBUG nova.compute.manager [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1016.243742] env[61998]: DEBUG nova.compute.manager [req-6e502d18-7122-4bab-88d1-733e89a01a19 req-f1dac8de-b0ce-4714-a163-e0f6a7d4af0c service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received event network-vif-deleted-80f81837-014a-44f6-a2eb-6c05b1d74801 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1016.244026] env[61998]: INFO nova.compute.manager [req-6e502d18-7122-4bab-88d1-733e89a01a19 req-f1dac8de-b0ce-4714-a163-e0f6a7d4af0c service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Neutron deleted interface 80f81837-014a-44f6-a2eb-6c05b1d74801; detaching it from the instance and deleting it from the info cache [ 1016.244388] env[61998]: DEBUG nova.network.neutron [req-6e502d18-7122-4bab-88d1-733e89a01a19 req-f1dac8de-b0ce-4714-a163-e0f6a7d4af0c service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updating instance_info_cache with network_info: [{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "address": "fa:16:3e:10:ca:da", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4cf5059-51", "ovs_interfaceid": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a1c20dc3-4d7b-4812-a552-f524de427e63", "address": "fa:16:3e:f1:f5:60", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1c20dc3-4d", "ovs_interfaceid": "a1c20dc3-4d7b-4812-a552-f524de427e63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1016.279835] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388942, 'name': Rename_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.280086] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3b2ac909-d696-4d43-aa4a-46c066602430 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a909297e-ac29-4630-a54b-abd0b6f67893" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.897s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.287096] env[61998]: DEBUG nova.network.neutron [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Successfully created port: c12a8246-774e-4a6e-8ff1-29a553b5f0ee {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1016.417275] env[61998]: DEBUG nova.scheduler.client.report [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1016.751452] env[61998]: DEBUG oslo_concurrency.lockutils [req-6e502d18-7122-4bab-88d1-733e89a01a19 req-f1dac8de-b0ce-4714-a163-e0f6a7d4af0c service nova] Acquiring lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.751648] env[61998]: DEBUG oslo_concurrency.lockutils [req-6e502d18-7122-4bab-88d1-733e89a01a19 req-f1dac8de-b0ce-4714-a163-e0f6a7d4af0c service nova] Acquired lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.754067] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f68045-86e1-4dde-b093-3ff8badc362b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.760391] env[61998]: DEBUG nova.compute.manager [req-06e6a5b6-2d3d-42e5-a41a-7e933edc8f4e req-8b51664f-9c0c-4d98-ace1-525a12f2586e service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received event network-vif-deleted-a1c20dc3-4d7b-4812-a552-f524de427e63 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1016.760589] env[61998]: INFO nova.compute.manager [req-06e6a5b6-2d3d-42e5-a41a-7e933edc8f4e req-8b51664f-9c0c-4d98-ace1-525a12f2586e service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Neutron deleted interface a1c20dc3-4d7b-4812-a552-f524de427e63; detaching it from the instance and deleting it from the 
info cache [ 1016.760866] env[61998]: DEBUG nova.network.neutron [req-06e6a5b6-2d3d-42e5-a41a-7e933edc8f4e req-8b51664f-9c0c-4d98-ace1-525a12f2586e service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updating instance_info_cache with network_info: [{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "address": "fa:16:3e:10:ca:da", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4cf5059-51", "ovs_interfaceid": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.785235] env[61998]: DEBUG oslo_concurrency.lockutils [req-6e502d18-7122-4bab-88d1-733e89a01a19 req-f1dac8de-b0ce-4714-a163-e0f6a7d4af0c service nova] Releasing lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.785527] env[61998]: WARNING nova.compute.manager [req-6e502d18-7122-4bab-88d1-733e89a01a19 req-f1dac8de-b0ce-4714-a163-e0f6a7d4af0c service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Detach interface failed, port_id=80f81837-014a-44f6-a2eb-6c05b1d74801, reason: No device with interface-id 80f81837-014a-44f6-a2eb-6c05b1d74801 exists on VM: nova.exception.NotFound: No device with interface-id 80f81837-014a-44f6-a2eb-6c05b1d74801 exists on VM [ 1016.786772] env[61998]: DEBUG nova.network.neutron [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updating instance_info_cache with network_info: [{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "address": "fa:16:3e:10:ca:da", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", 
"segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4cf5059-51", "ovs_interfaceid": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "80f81837-014a-44f6-a2eb-6c05b1d74801", "address": "fa:16:3e:86:0a:3b", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80f81837-01", "ovs_interfaceid": "80f81837-014a-44f6-a2eb-6c05b1d74801", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a1c20dc3-4d7b-4812-a552-f524de427e63", "address": "fa:16:3e:f1:f5:60", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1c20dc3-4d", "ovs_interfaceid": "a1c20dc3-4d7b-4812-a552-f524de427e63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.793540] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388942, 'name': Rename_Task, 'duration_secs': 1.14577} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.793834] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1016.794102] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19696828-cd78-4026-bcf8-e441dba75374 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.800994] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 1016.800994] env[61998]: value = "task-1388943" [ 1016.800994] env[61998]: _type = "Task" [ 1016.800994] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.809694] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388943, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.927029] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.949802] env[61998]: INFO nova.scheduler.client.report [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Deleted allocations for instance 101d9d29-24b4-4c4d-bf7a-70abfd200be9 [ 1017.153749] env[61998]: DEBUG oslo_concurrency.lockutils [None req-650d890c-5844-4719-8995-4696364a54f8 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "a909297e-ac29-4630-a54b-abd0b6f67893" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.154062] env[61998]: DEBUG oslo_concurrency.lockutils [None req-650d890c-5844-4719-8995-4696364a54f8 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a909297e-ac29-4630-a54b-abd0b6f67893" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.154274] env[61998]: DEBUG nova.compute.manager [None req-650d890c-5844-4719-8995-4696364a54f8 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1017.156952] env[61998]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77d0e53-a216-4b41-9605-6e051176dbd1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.162549] env[61998]: DEBUG nova.compute.manager [None req-650d890c-5844-4719-8995-4696364a54f8 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61998) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3378}} [ 1017.163166] env[61998]: DEBUG nova.objects.instance [None req-650d890c-5844-4719-8995-4696364a54f8 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lazy-loading 'flavor' on Instance uuid a909297e-ac29-4630-a54b-abd0b6f67893 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.213906] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.221542] env[61998]: DEBUG nova.compute.manager [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1017.236766] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "58626303-4d70-48bb-9aaf-1b54cef92a76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.237323] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "58626303-4d70-48bb-9aaf-1b54cef92a76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.237668] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "58626303-4d70-48bb-9aaf-1b54cef92a76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.237967] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "58626303-4d70-48bb-9aaf-1b54cef92a76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.238287] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "58626303-4d70-48bb-9aaf-1b54cef92a76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.243781] env[61998]: INFO nova.compute.manager [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Terminating instance [ 1017.246340] env[61998]: DEBUG nova.compute.manager [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 1017.246699] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1017.247808] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c6d13c-839f-4fb6-b61a-dee95e635b53 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.254823] env[61998]: DEBUG nova.virt.hardware [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1017.255153] env[61998]: DEBUG nova.virt.hardware [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1017.255401] env[61998]: DEBUG nova.virt.hardware [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1017.255701] 
env[61998]: DEBUG nova.virt.hardware [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1017.255930] env[61998]: DEBUG nova.virt.hardware [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1017.256177] env[61998]: DEBUG nova.virt.hardware [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1017.256504] env[61998]: DEBUG nova.virt.hardware [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1017.256835] env[61998]: DEBUG nova.virt.hardware [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1017.257039] env[61998]: DEBUG nova.virt.hardware [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1017.257298] env[61998]: DEBUG nova.virt.hardware [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1017.257576] env[61998]: DEBUG nova.virt.hardware [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1017.258691] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbff908-9877-4035-aac8-472a161f31e6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.271158] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5548d1a-17c1-4490-a854-f659bf7d11c2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.277205] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Powering off the VM {{(pid=61998) 
power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1017.277968] env[61998]: DEBUG oslo_concurrency.lockutils [req-06e6a5b6-2d3d-42e5-a41a-7e933edc8f4e req-8b51664f-9c0c-4d98-ace1-525a12f2586e service nova] Acquiring lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.278486] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb139d2a-4606-41c4-bb53-16ff07059bc2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.292838] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Releasing lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1017.293036] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updated the network info_cache for instance {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10298}} [ 1017.294866] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.295147] env[61998]: DEBUG nova.network.neutron [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1017.296684] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.297053] env[61998]: DEBUG oslo_vmware.api [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1017.297053] env[61998]: value = "task-1388944" [ 1017.297053] env[61998]: _type = "Task" [ 1017.297053] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.297519] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.297827] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.300997] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.301525] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.301676] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61998) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10846}} [ 1017.310709] env[61998]: DEBUG oslo_vmware.api [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388944, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.315921] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388943, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.459841] env[61998]: DEBUG oslo_concurrency.lockutils [None req-e873e9ad-67fa-458c-9b46-ba5943d12dff tempest-DeleteServersTestJSON-1983871495 tempest-DeleteServersTestJSON-1983871495-project-member] Lock "101d9d29-24b4-4c4d-bf7a-70abfd200be9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.777s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.811864] env[61998]: DEBUG oslo_vmware.api [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388944, 'name': PowerOffVM_Task, 'duration_secs': 0.34354} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.814818] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1017.815025] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1017.815603] env[61998]: DEBUG oslo_vmware.api [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388943, 'name': PowerOnVM_Task, 'duration_secs': 0.524892} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.815815] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca955d7b-59ca-4b3c-874e-31e1dc150d20 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.817380] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1017.817605] env[61998]: INFO nova.compute.manager [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Took 9.65 seconds to spawn the instance on the hypervisor. 
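The teardown logged around here runs in a fixed order: power off the VM (PowerOffVM_Task), unregister it from the vCenter inventory (UnregisterVM), then delete its directory from the datastore (DeleteDatastoreFile_Task). A minimal sketch of that ordering, with illustrative callables standing in for the nova.virt.vmwareapi helpers rather than the real code:

import time

def destroy_instance(instance_uuid, power_off, unregister, delete_datastore_dir):
    # Mirrors the destroy sequence in the surrounding entries.
    start = time.monotonic()
    power_off(instance_uuid)             # PowerOffVM_Task: the guest must be stopped first
    unregister(instance_uuid)            # UnregisterVM: drop the VM from the vCenter inventory
    delete_datastore_dir(instance_uuid)  # DeleteDatastoreFile_Task: e.g. [datastore2] <uuid>
    print("Took %.2f seconds to destroy the instance on the hypervisor." % (time.monotonic() - start))

Unregistering before the file delete avoids leaving a registered VM that points at files which no longer exist.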
[ 1017.817809] env[61998]: DEBUG nova.compute.manager [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1017.818631] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ae87db-65ae-43da-a6cd-3e2bcd2bf328 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.965078] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1017.965344] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1017.965532] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Deleting the datastore file [datastore2] 58626303-4d70-48bb-9aaf-1b54cef92a76 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1017.965803] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-36d18471-bae3-4958-a38f-9355177766ae {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.973594] env[61998]: DEBUG oslo_vmware.api [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1017.973594] env[61998]: value = "task-1388946" [ 1017.973594] env[61998]: _type = "Task" [ 1017.973594] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.982723] env[61998]: DEBUG oslo_vmware.api [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388946, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.076933] env[61998]: INFO nova.network.neutron [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Port 80f81837-014a-44f6-a2eb-6c05b1d74801 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
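The "no longer associated" messages here and just below show the cache refresh reconciling the cached VIF list against the ports Neutron still reports for the instance. A minimal sketch of that pruning step, using plain dicts as stand-ins for Nova's NetworkInfo/VIF objects:

def prune_stale_vifs(cached_network_info, current_port_ids):
    # Keep only cached VIF entries whose port still exists in Neutron.
    kept = []
    for vif in cached_network_info:
        if vif["id"] in current_port_ids:
            kept.append(vif)
        else:
            print("Port %s from network info_cache is no longer associated "
                  "with instance in Neutron. Removing from network info_cache." % vif["id"])
    return kept

# Against the cache logged above: three cached ports, of which Neutron
# now reports only f4cf5059-51bc-4b7e-afa4-aab588228a6c.
cache = [{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c"},
         {"id": "80f81837-014a-44f6-a2eb-6c05b1d74801"},
         {"id": "a1c20dc3-4d7b-4812-a552-f524de427e63"}]
print([v["id"] for v in prune_stale_vifs(cache, {"f4cf5059-51bc-4b7e-afa4-aab588228a6c"})])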
[ 1018.076933] env[61998]: INFO nova.network.neutron [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Port a1c20dc3-4d7b-4812-a552-f524de427e63 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1018.077228] env[61998]: DEBUG nova.network.neutron [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updating instance_info_cache with network_info: [{"id": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "address": "fa:16:3e:10:ca:da", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4cf5059-51", "ovs_interfaceid": "f4cf5059-51bc-4b7e-afa4-aab588228a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.126676] env[61998]: DEBUG nova.network.neutron [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Successfully updated port: c12a8246-774e-4a6e-8ff1-29a553b5f0ee {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1018.173018] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-650d890c-5844-4719-8995-4696364a54f8 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1018.173018] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23b6e3b0-9d5e-479e-b817-22ad4505876e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.179221] env[61998]: DEBUG oslo_vmware.api [None req-650d890c-5844-4719-8995-4696364a54f8 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 1018.179221] env[61998]: value = "task-1388947" [ 1018.179221] env[61998]: _type = "Task" [ 1018.179221] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.187994] env[61998]: DEBUG oslo_vmware.api [None req-650d890c-5844-4719-8995-4696364a54f8 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388947, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.283482] env[61998]: DEBUG nova.compute.manager [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Received event network-vif-plugged-c12a8246-774e-4a6e-8ff1-29a553b5f0ee {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1018.283482] env[61998]: DEBUG oslo_concurrency.lockutils [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] Acquiring lock "9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.283482] env[61998]: DEBUG oslo_concurrency.lockutils [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] Lock "9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.283482] env[61998]: DEBUG oslo_concurrency.lockutils [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] Lock "9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.284123] env[61998]: DEBUG nova.compute.manager [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] No waiting events found dispatching network-vif-plugged-c12a8246-774e-4a6e-8ff1-29a553b5f0ee {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1018.284644] env[61998]: WARNING nova.compute.manager [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Received unexpected event network-vif-plugged-c12a8246-774e-4a6e-8ff1-29a553b5f0ee for instance with vm_state building and task_state spawning. [ 1018.285081] env[61998]: DEBUG nova.compute.manager [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Received event network-changed-c12a8246-774e-4a6e-8ff1-29a553b5f0ee {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1018.285412] env[61998]: DEBUG nova.compute.manager [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Refreshing instance network info cache due to event network-changed-c12a8246-774e-4a6e-8ff1-29a553b5f0ee. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1018.285798] env[61998]: DEBUG oslo_concurrency.lockutils [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] Acquiring lock "refresh_cache-9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.286092] env[61998]: DEBUG oslo_concurrency.lockutils [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] Acquired lock "refresh_cache-9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.286418] env[61998]: DEBUG nova.network.neutron [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Refreshing network info cache for port c12a8246-774e-4a6e-8ff1-29a553b5f0ee {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1018.339723] env[61998]: INFO nova.compute.manager [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Took 15.56 seconds to build instance. [ 1018.485107] env[61998]: DEBUG oslo_vmware.api [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.350894} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.485107] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1018.485206] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1018.485394] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1018.485667] env[61998]: INFO nova.compute.manager [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1018.486142] env[61998]: DEBUG oslo.service.loopingcall [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1018.486473] env[61998]: DEBUG nova.compute.manager [-] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1018.487604] env[61998]: DEBUG nova.network.neutron [-] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1018.551570] env[61998]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port a1c20dc3-4d7b-4812-a552-f524de427e63 could not be found.", "detail": ""}} {{(pid=61998) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1018.551785] env[61998]: DEBUG nova.network.neutron [-] Unable to show port a1c20dc3-4d7b-4812-a552-f524de427e63 as it no longer exists. {{(pid=61998) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1018.580022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.633858] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "refresh_cache-9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.689600] env[61998]: DEBUG oslo_vmware.api [None req-650d890c-5844-4719-8995-4696364a54f8 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388947, 'name': PowerOffVM_Task, 'duration_secs': 0.261798} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.690186] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-650d890c-5844-4719-8995-4696364a54f8 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1018.690422] env[61998]: DEBUG nova.compute.manager [None req-650d890c-5844-4719-8995-4696364a54f8 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1018.691913] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa0a6b9-06f2-4aa4-a318-804ce4279dfd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.823152] env[61998]: DEBUG nova.network.neutron [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1018.843498] env[61998]: DEBUG oslo_concurrency.lockutils [None req-15c6ff0b-7607-4e49-84b4-1b0915e3b444 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "eb45dbc3-a972-4004-9c9a-9bd908b34723" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.075s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.907775] env[61998]: DEBUG nova.network.neutron [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.083962] env[61998]: DEBUG oslo_concurrency.lockutils [None req-f5e7f308-ce02-4c50-9e3b-702d46ad3cca tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-58626303-4d70-48bb-9aaf-1b54cef92a76-80f81837-014a-44f6-a2eb-6c05b1d74801" "released" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: held 9.922s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.206071] env[61998]: DEBUG oslo_concurrency.lockutils [None req-650d890c-5844-4719-8995-4696364a54f8 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a909297e-ac29-4630-a54b-abd0b6f67893" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.052s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.413730] env[61998]: DEBUG oslo_concurrency.lockutils [req-377d75cc-b647-4a66-bd14-7a0032f1f26f req-8b7ec61d-69b1-48be-9203-bce1af0cf776 service nova] Releasing lock "refresh_cache-9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.414243] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired lock "refresh_cache-9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.414384] env[61998]: DEBUG nova.network.neutron [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.683786] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "eb45dbc3-a972-4004-9c9a-9bd908b34723" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.684116] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 
tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "eb45dbc3-a972-4004-9c9a-9bd908b34723" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.684346] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "eb45dbc3-a972-4004-9c9a-9bd908b34723-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.684534] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "eb45dbc3-a972-4004-9c9a-9bd908b34723-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.684710] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "eb45dbc3-a972-4004-9c9a-9bd908b34723-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.687355] env[61998]: INFO nova.compute.manager [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Terminating instance [ 1019.689551] env[61998]: DEBUG nova.compute.manager [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 1019.689758] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1019.690779] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442db6d5-c477-4602-aa27-336c71a52498 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.699569] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.699809] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-960e45b9-033d-4f15-a161-a2a04fe006a2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.706577] env[61998]: DEBUG oslo_vmware.api [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 1019.706577] env[61998]: value = "task-1388948" [ 1019.706577] env[61998]: _type = "Task" [ 1019.706577] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.714707] env[61998]: DEBUG oslo_vmware.api [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388948, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.781352] env[61998]: DEBUG nova.network.neutron [-] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.991200] env[61998]: DEBUG nova.network.neutron [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1020.150525] env[61998]: DEBUG nova.network.neutron [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Updating instance_info_cache with network_info: [{"id": "c12a8246-774e-4a6e-8ff1-29a553b5f0ee", "address": "fa:16:3e:d7:94:a9", "network": {"id": "eb61f6e6-e4f1-4abc-9153-1b2f61641b6e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1941982145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f2b8a39c23bc46008370ed877054464e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc12a8246-77", "ovs_interfaceid": "c12a8246-774e-4a6e-8ff1-29a553b5f0ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.217471] env[61998]: DEBUG oslo_vmware.api [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388948, 'name': PowerOffVM_Task, 'duration_secs': 0.19887} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.218699] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1020.218699] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.218699] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6b7910c-57cf-40ed-bfd1-1dbfb295f078 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.287590] env[61998]: INFO nova.compute.manager [-] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Took 1.80 seconds to deallocate network for instance. 
[ 1020.290660] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.291045] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.291330] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleting the datastore file [datastore2] eb45dbc3-a972-4004-9c9a-9bd908b34723 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.297141] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f9cc364-8b46-43d9-9f98-30022a6ebd3b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.301525] env[61998]: DEBUG oslo_vmware.api [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for the task: (returnval){ [ 1020.301525] env[61998]: value = "task-1388950" [ 1020.301525] env[61998]: _type = "Task" [ 1020.301525] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.310651] env[61998]: DEBUG oslo_vmware.api [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388950, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.318022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "a909297e-ac29-4630-a54b-abd0b6f67893" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.318022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a909297e-ac29-4630-a54b-abd0b6f67893" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.318022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "a909297e-ac29-4630-a54b-abd0b6f67893-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.318022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a909297e-ac29-4630-a54b-abd0b6f67893-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.318022] env[61998]: DEBUG oslo_concurrency.lockutils [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a909297e-ac29-4630-a54b-abd0b6f67893-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.320479] env[61998]: DEBUG nova.compute.manager [req-6d490eae-3dc8-413b-8228-08b07bd93913 req-9de77dc9-2ac7-49fc-93d6-799cf2415575 service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Received event network-vif-deleted-f4cf5059-51bc-4b7e-afa4-aab588228a6c {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1020.321148] env[61998]: INFO nova.compute.manager [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Terminating instance [ 1020.323301] env[61998]: DEBUG nova.compute.manager [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 1020.323622] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.324558] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882f4ac3-d55d-47db-869c-f589f692a2e0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.335015] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.335297] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa8dbd76-ad30-4735-b01c-72327cb48ca7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.444248] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.444514] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.444670] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleting the datastore file [datastore2] a909297e-ac29-4630-a54b-abd0b6f67893 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.444948] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-854a1c50-1de9-4388-9877-6ba033be13ea {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.451749] env[61998]: DEBUG oslo_vmware.api [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for the task: (returnval){ [ 1020.451749] env[61998]: value = "task-1388952" [ 1020.451749] env[61998]: _type = "Task" [ 1020.451749] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.460148] env[61998]: DEBUG oslo_vmware.api [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388952, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.653740] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Releasing lock "refresh_cache-9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.654119] env[61998]: DEBUG nova.compute.manager [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Instance network_info: |[{"id": "c12a8246-774e-4a6e-8ff1-29a553b5f0ee", "address": "fa:16:3e:d7:94:a9", "network": {"id": "eb61f6e6-e4f1-4abc-9153-1b2f61641b6e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1941982145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f2b8a39c23bc46008370ed877054464e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc12a8246-77", "ovs_interfaceid": "c12a8246-774e-4a6e-8ff1-29a553b5f0ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1020.654626] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:94:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '605f83bd-808c-4b54-922e-54b14690987a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c12a8246-774e-4a6e-8ff1-29a553b5f0ee', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1020.662744] env[61998]: DEBUG oslo.service.loopingcall [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1020.662993] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1020.663253] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c07c43e7-8178-4862-b943-3a11b27e6612 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.682860] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1020.682860] env[61998]: value = "task-1388953" [ 1020.682860] env[61998]: _type = "Task" [ 1020.682860] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.690617] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388953, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.798140] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.798603] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.798871] env[61998]: DEBUG nova.objects.instance [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'resources' on Instance uuid 58626303-4d70-48bb-9aaf-1b54cef92a76 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1020.813040] env[61998]: DEBUG oslo_vmware.api [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Task: {'id': task-1388950, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13428} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.813040] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1020.813040] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1020.813040] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1020.813221] env[61998]: INFO nova.compute.manager [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1020.813327] env[61998]: DEBUG oslo.service.loopingcall [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1020.813541] env[61998]: DEBUG nova.compute.manager [-] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1020.813639] env[61998]: DEBUG nova.network.neutron [-] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1020.962273] env[61998]: DEBUG oslo_vmware.api [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Task: {'id': task-1388952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137478} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.962580] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1020.962789] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1020.963030] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1020.963263] env[61998]: INFO nova.compute.manager [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1020.963515] env[61998]: DEBUG oslo.service.loopingcall [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1020.963717] env[61998]: DEBUG nova.compute.manager [-] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1020.963811] env[61998]: DEBUG nova.network.neutron [-] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1021.192888] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388953, 'name': CreateVM_Task, 'duration_secs': 0.406885} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.193224] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1021.193939] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.194133] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.194476] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1021.194745] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45a114de-6f39-4ceb-9903-2e7791135d5b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.199549] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1021.199549] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52c90a06-6b13-752a-4fb9-86dd942d97af" [ 1021.199549] env[61998]: _type = "Task" [ 1021.199549] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.207507] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c90a06-6b13-752a-4fb9-86dd942d97af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.328743] env[61998]: DEBUG nova.compute.manager [req-bdda0ece-8138-4a43-bed0-b31ecb2383c7 req-ebeeb374-1c8a-425b-8a74-f788642b52cb service nova] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Received event network-vif-deleted-549530e4-6c8b-4352-908f-473a6496a484 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1021.328989] env[61998]: INFO nova.compute.manager [req-bdda0ece-8138-4a43-bed0-b31ecb2383c7 req-ebeeb374-1c8a-425b-8a74-f788642b52cb service nova] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Neutron deleted interface 549530e4-6c8b-4352-908f-473a6496a484; detaching it from the instance and deleting it from the info cache [ 1021.329253] env[61998]: DEBUG nova.network.neutron [req-bdda0ece-8138-4a43-bed0-b31ecb2383c7 req-ebeeb374-1c8a-425b-8a74-f788642b52cb service nova] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.394686] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c605c18f-64f7-425b-bad4-60e3bf32abf9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.403920] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b12d945-14fa-4b58-bd47-747dc7ddeaaa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.434608] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228e2f0f-6de1-48fc-85e4-7cc49ca6fc16 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.442090] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc7b67a-e9af-4821-ae0a-4db419281f9a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.455171] env[61998]: DEBUG nova.compute.provider_tree [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.528983] env[61998]: DEBUG nova.network.neutron [-] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.709622] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c90a06-6b13-752a-4fb9-86dd942d97af, 'name': SearchDatastore_Task, 'duration_secs': 0.009069} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.709965] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.710228] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1021.710464] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.710618] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.710805] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1021.711094] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56829d38-33fb-4a19-84b2-95114e172598 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.719273] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1021.719436] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1021.720175] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e73495f4-27a4-47ce-8e78-ab27be0bb295 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.725109] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1021.725109] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5282580a-b64e-a9da-3b35-af834c610571" [ 1021.725109] env[61998]: _type = "Task" [ 1021.725109] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.732616] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5282580a-b64e-a9da-3b35-af834c610571, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.806927] env[61998]: DEBUG nova.network.neutron [-] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.832518] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0cbd3402-674b-446e-a2c2-bac15d41874a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.842057] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa1d42a-16e6-4c6d-832d-e45d24ec7b7a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.873505] env[61998]: DEBUG nova.compute.manager [req-bdda0ece-8138-4a43-bed0-b31ecb2383c7 req-ebeeb374-1c8a-425b-8a74-f788642b52cb service nova] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Detach interface failed, port_id=549530e4-6c8b-4352-908f-473a6496a484, reason: Instance a909297e-ac29-4630-a54b-abd0b6f67893 could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 1021.958017] env[61998]: DEBUG nova.scheduler.client.report [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1022.031233] env[61998]: INFO nova.compute.manager [-] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Took 1.22 seconds to deallocate network for instance. 
[ 1022.236420] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5282580a-b64e-a9da-3b35-af834c610571, 'name': SearchDatastore_Task, 'duration_secs': 0.007876} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.237907] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e6b6e0e-0748-498b-92c9-e38524ee74f0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.244013] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1022.244013] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52ebb433-a0bb-2a23-fea5-4507c7e37a53" [ 1022.244013] env[61998]: _type = "Task" [ 1022.244013] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.250494] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52ebb433-a0bb-2a23-fea5-4507c7e37a53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.309531] env[61998]: INFO nova.compute.manager [-] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Took 1.35 seconds to deallocate network for instance. 
[ 1022.372030] env[61998]: DEBUG nova.compute.manager [req-9511c38d-54c7-461e-88e3-a0267d2c1237 req-cec118d5-c01c-4136-8724-982fb58343cb service nova] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Received event network-vif-deleted-ba53e7e6-8720-4517-8d56-04a0c1784589 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1022.459872] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "cd7775be-954c-4117-b9a9-763bbfb325c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.460129] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "cd7775be-954c-4117-b9a9-763bbfb325c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.466130] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.665s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.483058] env[61998]: INFO nova.scheduler.client.report [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Deleted allocations for instance 58626303-4d70-48bb-9aaf-1b54cef92a76 [ 1022.538274] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.538759] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.538857] env[61998]: DEBUG nova.objects.instance [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lazy-loading 'resources' on Instance uuid eb45dbc3-a972-4004-9c9a-9bd908b34723 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.733649] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "a5785859-2a23-478d-b156-1817fbdcb313" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.733892] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "a5785859-2a23-478d-b156-1817fbdcb313" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.754596] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52ebb433-a0bb-2a23-fea5-4507c7e37a53, 'name': SearchDatastore_Task, 'duration_secs': 0.009827} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.754866] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.755138] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b/9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1022.755390] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6683615-28f9-48d6-9289-d63369b842ed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.762408] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1022.762408] env[61998]: value = "task-1388954" [ 1022.762408] env[61998]: _type = "Task" [ 1022.762408] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.772363] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388954, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.818515] env[61998]: DEBUG oslo_concurrency.lockutils [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.965056] env[61998]: DEBUG nova.compute.manager [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1022.991230] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b62ac1db-8068-4537-ab7e-c7a04ba98eba tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "58626303-4d70-48bb-9aaf-1b54cef92a76" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.754s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.992382] env[61998]: DEBUG oslo_concurrency.lockutils [req-06e6a5b6-2d3d-42e5-a41a-7e933edc8f4e req-8b51664f-9c0c-4d98-ace1-525a12f2586e service nova] Acquired lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.993544] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8969647-c32c-4ef8-9ee5-a91347b45ac6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.003881] env[61998]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 1023.004074] env[61998]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=61998) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1023.004536] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16de38cb-f7ac-4483-bbb5-250d4404a4e7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.014743] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eccfda9-8572-42a6-a139-5432cf69ea01 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.044193] env[61998]: ERROR root [req-06e6a5b6-2d3d-42e5-a41a-7e933edc8f4e req-8b51664f-9c0c-4d98-ace1-525a12f2586e service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-294772' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-294772' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-294772' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-294772'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-294772' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-294772' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-294772'}\n"]: nova.exception.InstanceNotFound: Instance 58626303-4d70-48bb-9aaf-1b54cef92a76 could not be found. [ 1023.044455] env[61998]: DEBUG oslo_concurrency.lockutils [req-06e6a5b6-2d3d-42e5-a41a-7e933edc8f4e req-8b51664f-9c0c-4d98-ace1-525a12f2586e service nova] Releasing lock "58626303-4d70-48bb-9aaf-1b54cef92a76" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.044626] env[61998]: DEBUG nova.compute.manager [req-06e6a5b6-2d3d-42e5-a41a-7e933edc8f4e req-8b51664f-9c0c-4d98-ace1-525a12f2586e service nova] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Detach interface failed, port_id=a1c20dc3-4d7b-4812-a552-f524de427e63, reason: Instance 58626303-4d70-48bb-9aaf-1b54cef92a76 could not be found. 
{{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 1023.070592] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "79a15d76-acc3-465d-9ab7-fa61a894affd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.070926] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "79a15d76-acc3-465d-9ab7-fa61a894affd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.162917] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ee188b-a4af-473d-a940-8e952873b819 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.170980] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e935c417-634c-42dc-9914-9df6ecf9a3fc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.206317] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a283b18-ab04-4e19-b430-0a0462a461ac {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.213572] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a0dec0-0880-46f8-ace3-13813b6981fd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.228979] env[61998]: DEBUG nova.compute.provider_tree [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.235989] env[61998]: DEBUG nova.compute.manager [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1023.272995] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388954, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45966} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.273279] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b/9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1023.273498] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1023.273743] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-946fc363-5dd6-44c2-83d2-75022093f20f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.279851] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1023.279851] env[61998]: value = "task-1388955" [ 1023.279851] env[61998]: _type = "Task" [ 1023.279851] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.287449] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388955, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.484103] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.573780] env[61998]: DEBUG nova.compute.manager [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Starting instance... 
{{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1023.733550] env[61998]: DEBUG nova.scheduler.client.report [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1023.752990] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.789451] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388955, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071524} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.789887] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1023.790791] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa7ebf7-e1cb-4a2a-a3dd-2f68d1de2c15 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.813473] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b/9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.814129] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65561fb4-4e4a-435f-8e66-2096233612ca {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.833265] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1023.833265] env[61998]: value = "task-1388956" [ 1023.833265] env[61998]: _type = "Task" [ 1023.833265] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.840820] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388956, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.093188] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.237905] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.699s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.240320] env[61998]: DEBUG oslo_concurrency.lockutils [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.422s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.240582] env[61998]: DEBUG nova.objects.instance [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lazy-loading 'resources' on Instance uuid a909297e-ac29-4630-a54b-abd0b6f67893 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.262313] env[61998]: INFO nova.scheduler.client.report [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Deleted allocations for instance eb45dbc3-a972-4004-9c9a-9bd908b34723 [ 1024.342744] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388956, 'name': ReconfigVM_Task, 'duration_secs': 0.313533} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.343040] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b/9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.343666] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8444e0a-526d-49b7-8f01-7137143d823b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.349561] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1024.349561] env[61998]: value = "task-1388957" [ 1024.349561] env[61998]: _type = "Task" [ 1024.349561] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.359016] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388957, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.770419] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3d1a0c09-0681-40b3-8e52-e3c6e3a80567 tempest-ServerDiskConfigTestJSON-347226950 tempest-ServerDiskConfigTestJSON-347226950-project-member] Lock "eb45dbc3-a972-4004-9c9a-9bd908b34723" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.085s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.834226] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348e0ea7-66e5-41be-920e-62397001af69 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.842261] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062c6a93-277a-4b08-a03c-84d258a77298 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.876781] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1843b9e-2567-4de0-9b64-27b94107a09f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.884256] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388957, 'name': Rename_Task, 'duration_secs': 0.189411} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.886298] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1024.886536] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a82fda47-a45a-4d15-9f2a-2983fae1d04e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.888832] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b898731b-9671-484e-89a5-73d03ff6379f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.902902] env[61998]: DEBUG nova.compute.provider_tree [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.905066] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1024.905066] env[61998]: value = "task-1388958" [ 1024.905066] env[61998]: _type = "Task" [ 1024.905066] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.913905] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388958, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.392988] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "dde42f82-2616-43f0-a709-d6a63a63dd0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.393346] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "dde42f82-2616-43f0-a709-d6a63a63dd0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.406707] env[61998]: DEBUG nova.scheduler.client.report [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1025.426389] env[61998]: DEBUG oslo_vmware.api [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388958, 'name': PowerOnVM_Task, 'duration_secs': 0.44342} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.426707] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.426992] env[61998]: INFO nova.compute.manager [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Took 8.21 seconds to spawn the instance on the hypervisor. 
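Annotation: every vSphere operation in this stretch (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same shape: invoke the asynchronous call, receive a task handle such as task-1388958, then poll it until vCenter reports success, logging progress along the way. Below is a minimal pure-Python sketch of that poll-until-done loop; FakeTask and its states are illustrative stand-ins, not oslo.vmware's real API.

import time

# Illustrative task states; real vSphere TaskInfo uses 'queued', 'running',
# 'success' and 'error'. Everything below is a stand-in, not oslo.vmware code.
RUNNING, SUCCESS, ERROR = "running", "success", "error"

class FakeTask:
    """Stand-in for a vCenter task handle such as 'task-1388958'."""
    def __init__(self, polls_until_done=3):
        self._total = polls_until_done
        self._remaining = polls_until_done
        self.progress = 0

    def poll(self):
        # Each poll advances the fake task, mimicking the 'progress is 33%'
        # then 'completed successfully' records above.
        if self._remaining > 0:
            self._remaining -= 1
            self.progress = int(100 * (self._total - self._remaining) / self._total)
        return SUCCESS if self._remaining == 0 else RUNNING

def wait_for_task(task, interval=0.1):
    """Poll until the task leaves RUNNING; return the elapsed seconds
    (analogous to the 'duration_secs' field logged on completion)."""
    start = time.monotonic()
    while True:
        state = task.poll()
        if state == SUCCESS:
            return time.monotonic() - start
        if state == ERROR:
            raise RuntimeError("task failed")
        time.sleep(interval)

print(f"completed in {wait_for_task(FakeTask()):.3f}s")

The per-step durations reported here (0.46s copy, 0.07s extend, 0.31s reconfigure, 0.19s rename, 0.44s power-on) are what add up to the "Took 8.21 seconds to spawn" summary above.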
[ 1025.427272] env[61998]: DEBUG nova.compute.manager [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1025.428864] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9a4e30-e6e7-4ec4-9331-02625ac9dd0b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.896489] env[61998]: DEBUG nova.compute.manager [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1025.912543] env[61998]: DEBUG oslo_concurrency.lockutils [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.672s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.914990] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.431s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.916984] env[61998]: INFO nova.compute.claims [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.946267] env[61998]: INFO nova.scheduler.client.report [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Deleted allocations for instance a909297e-ac29-4630-a54b-abd0b6f67893 [ 1025.963554] env[61998]: INFO nova.compute.manager [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Took 14.59 seconds to build instance. 
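Annotation: the "acquired ... waited N s" / "released ... held N s" bookkeeping that dominates this stretch (e.g. "compute_resources" held 1.672s, waited 2.431s) comes from oslo.concurrency's lock wrapper timestamping acquisition and release. A rough, illustrative context-manager equivalent follows; it is a sketch of the logging pattern only, not lockutils' actual implementation.

import threading
import time
from contextlib import contextmanager

_locks = {}  # named-lock registry, keyed like "compute_resources"

@contextmanager
def timed_lock(name, by):
    """Mimic the lockutils records above: report how long we waited to
    acquire a named lock and how long we held it. Illustrative only."""
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    with lock:
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')

with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.05)  # stand-in for claiming resources under the lock

The long waits visible above (instance_claim waiting 2.4s, then 4.4s, then 6.2s) are the expected serialization effect: several concurrent builds queue on the single per-host "compute_resources" lock.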
[ 1026.422457] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.468035] env[61998]: DEBUG oslo_concurrency.lockutils [None req-93e0bd91-5b9d-4786-97d7-9a4b5c94026d tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.102s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.468503] env[61998]: DEBUG oslo_concurrency.lockutils [None req-33949829-a853-40f6-8d24-f55a346cbfd6 tempest-ServersTestJSON-729592905 tempest-ServersTestJSON-729592905-project-member] Lock "a909297e-ac29-4630-a54b-abd0b6f67893" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.153s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.039146] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e10f442-d5ad-43d4-8bfb-32e112642a22 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.046941] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd39abdc-8f42-4c8e-b054-771c2ba9cb3f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.081039] env[61998]: INFO nova.compute.manager [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Rescuing [ 1027.081167] env[61998]: DEBUG oslo_concurrency.lockutils [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "refresh_cache-9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.081312] env[61998]: DEBUG oslo_concurrency.lockutils [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired lock "refresh_cache-9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.081490] env[61998]: DEBUG nova.network.neutron [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1027.084849] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af738e85-7377-4b6f-b336-50f1015777f1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1027.094376] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0fd8668-6ef4-40f5-913e-83bdd6a91049 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.108805] env[61998]: DEBUG nova.compute.provider_tree [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.611671] env[61998]: DEBUG nova.scheduler.client.report [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1027.899281] env[61998]: DEBUG nova.network.neutron [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Updating instance_info_cache with network_info: [{"id": "c12a8246-774e-4a6e-8ff1-29a553b5f0ee", "address": "fa:16:3e:d7:94:a9", "network": {"id": "eb61f6e6-e4f1-4abc-9153-1b2f61641b6e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1941982145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f2b8a39c23bc46008370ed877054464e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc12a8246-77", "ovs_interfaceid": "c12a8246-774e-4a6e-8ff1-29a553b5f0ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.116130] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.116757] env[61998]: DEBUG nova.compute.manager [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 
tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1028.119501] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.367s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.121203] env[61998]: INFO nova.compute.claims [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1028.401488] env[61998]: DEBUG oslo_concurrency.lockutils [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Releasing lock "refresh_cache-9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.625926] env[61998]: DEBUG nova.compute.utils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1028.629211] env[61998]: DEBUG nova.compute.manager [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1028.629355] env[61998]: DEBUG nova.network.neutron [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1028.675334] env[61998]: DEBUG nova.policy [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aabe7469c8fc4ffbb03d40fc9f57a773', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7647715795e34176aebe4087ee3a3b42', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 1028.934169] env[61998]: DEBUG nova.network.neutron [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Successfully created port: 4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1029.130764] env[61998]: DEBUG nova.compute.manager [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1029.231110] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c3e7de-4ab7-4e7d-bec8-8fa1f7682d77 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.238950] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f072dd02-db7d-46ca-9a2e-ece0d338bdf1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.270341] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f0ef06-7797-423d-bea9-087b74bea61c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.277591] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b27d165-58cc-4cee-a032-1f71cc701d5f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.291043] env[61998]: DEBUG nova.compute.provider_tree [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.437419] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1029.437754] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1e56b60-902b-4a2a-b86c-d458a39b90f8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.445335] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1029.445335] env[61998]: value = "task-1388959" [ 1029.445335] env[61998]: _type = "Task" [ 1029.445335] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.454318] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388959, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.794359] env[61998]: DEBUG nova.scheduler.client.report [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1029.956106] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388959, 'name': PowerOffVM_Task, 'duration_secs': 0.220395} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.956357] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.957206] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e92e53a-044d-4cd3-8939-aaadddde7de3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.975644] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46130445-cc76-49f9-99a3-46ced80bbf25 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.008694] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1030.008986] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-325f6c55-b9c2-4f55-90e5-56c7532a3945 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.015780] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1030.015780] env[61998]: value = "task-1388960" [ 1030.015780] env[61998]: _type = "Task" [ 1030.015780] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.141810] env[61998]: DEBUG nova.compute.manager [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1030.162619] env[61998]: DEBUG nova.virt.hardware [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1030.162763] env[61998]: DEBUG nova.virt.hardware [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1030.162902] env[61998]: DEBUG nova.virt.hardware [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1030.163105] env[61998]: DEBUG nova.virt.hardware [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1030.163259] env[61998]: DEBUG nova.virt.hardware [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1030.163525] env[61998]: DEBUG nova.virt.hardware [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1030.163616] env[61998]: DEBUG nova.virt.hardware [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1030.163776] env[61998]: DEBUG nova.virt.hardware [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1030.163944] 
env[61998]: DEBUG nova.virt.hardware [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1030.164129] env[61998]: DEBUG nova.virt.hardware [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1030.164309] env[61998]: DEBUG nova.virt.hardware [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1030.165177] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82434e97-3895-4b5b-8f07-14859d52176c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.173469] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ee1262-e571-4530-8a36-881b7dafb7f8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.299826] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.180s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.300371] env[61998]: DEBUG nova.compute.manager [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1030.303124] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.210s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.304485] env[61998]: INFO nova.compute.claims [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1030.351088] env[61998]: DEBUG nova.compute.manager [req-d78925af-729b-4055-8222-28c021dabd93 req-022d9ce3-75d8-4180-b9b6-6af99132f8ef service nova] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Received event network-vif-plugged-4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1030.351355] env[61998]: DEBUG oslo_concurrency.lockutils [req-d78925af-729b-4055-8222-28c021dabd93 req-022d9ce3-75d8-4180-b9b6-6af99132f8ef service nova] Acquiring lock "cd7775be-954c-4117-b9a9-763bbfb325c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.351570] env[61998]: DEBUG oslo_concurrency.lockutils [req-d78925af-729b-4055-8222-28c021dabd93 req-022d9ce3-75d8-4180-b9b6-6af99132f8ef service nova] Lock "cd7775be-954c-4117-b9a9-763bbfb325c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.351747] env[61998]: DEBUG oslo_concurrency.lockutils [req-d78925af-729b-4055-8222-28c021dabd93 req-022d9ce3-75d8-4180-b9b6-6af99132f8ef service nova] Lock "cd7775be-954c-4117-b9a9-763bbfb325c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.351923] env[61998]: DEBUG nova.compute.manager [req-d78925af-729b-4055-8222-28c021dabd93 req-022d9ce3-75d8-4180-b9b6-6af99132f8ef service nova] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] No waiting events found dispatching network-vif-plugged-4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1030.354110] env[61998]: WARNING nova.compute.manager [req-d78925af-729b-4055-8222-28c021dabd93 req-022d9ce3-75d8-4180-b9b6-6af99132f8ef service nova] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Received unexpected event network-vif-plugged-4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5 for instance with vm_state building and task_state spawning. 
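Annotation: the records just above show Neutron's network-vif-plugged event for port 4ac1ddd1 arriving before the compute manager registered a waiter for it, so the event is dispatched to nobody ("No waiting events found") and logged as unexpected. A small sketch of that prepare/pop/dispatch pattern follows; the class below is an illustrative stand-in, not Nova's real InstanceEvents implementation.

import threading

class InstanceEvents:
    """Illustrative sketch of the waiter registry behind the
    'No waiting events found' / 'Received unexpected event' records."""
    def __init__(self):
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()

    def prepare_for_event(self, instance_uuid, event_name):
        # Called by the build path *before* triggering the external action.
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

def external_instance_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # The event raced ahead of the waiter, exactly like the WARNING above.
        print(f"Received unexpected event {event_name} for {instance_uuid}")
    else:
        waiter.set()

events = InstanceEvents()
external_instance_event(events, "cd7775be", "network-vif-plugged-4ac1ddd1")
waiter = events.prepare_for_event("a5785859", "network-vif-plugged-xyz")
external_instance_event(events, "a5785859", "network-vif-plugged-xyz")
print("waiter signalled:", waiter.is_set())

Here the race is harmless: the instance is still in vm_state building / task_state spawning, so the plug event will be observed again once the driver actually waits on it.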
[ 1030.420474] env[61998]: DEBUG nova.network.neutron [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Successfully updated port: 4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.526290] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] VM already powered off {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1030.526490] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1030.526744] env[61998]: DEBUG oslo_concurrency.lockutils [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.526898] env[61998]: DEBUG oslo_concurrency.lockutils [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.527096] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1030.527341] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1bf358b-badd-4644-8015-3e0619711368 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.536889] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1030.537081] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1030.537751] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ac367d5-9ca5-4b81-8407-aa109e179a58 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.542519] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1030.542519] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5201070c-1cf7-421e-a370-a7202adc2b55" [ 1030.542519] env[61998]: _type = "Task" [ 1030.542519] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.549739] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5201070c-1cf7-421e-a370-a7202adc2b55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.809440] env[61998]: DEBUG nova.compute.utils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1030.813699] env[61998]: DEBUG nova.compute.manager [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1030.813875] env[61998]: DEBUG nova.network.neutron [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1030.853582] env[61998]: DEBUG nova.policy [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aabe7469c8fc4ffbb03d40fc9f57a773', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7647715795e34176aebe4087ee3a3b42', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 1030.922794] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "refresh_cache-cd7775be-954c-4117-b9a9-763bbfb325c4" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.922940] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired lock "refresh_cache-cd7775be-954c-4117-b9a9-763bbfb325c4" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.923115] env[61998]: DEBUG nova.network.neutron [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.054228] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]5201070c-1cf7-421e-a370-a7202adc2b55, 'name': SearchDatastore_Task, 'duration_secs': 0.009068} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.055013] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6da34e89-51f7-4530-8d84-a03652955bb0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.060367] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1031.060367] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52beb9d9-ff5e-8937-2807-fcd4cdc32804" [ 1031.060367] env[61998]: _type = "Task" [ 1031.060367] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.068238] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52beb9d9-ff5e-8937-2807-fcd4cdc32804, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.103143] env[61998]: DEBUG nova.network.neutron [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Successfully created port: 7fd5361b-02d8-4989-956f-b685ccb4431b {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1031.317693] env[61998]: DEBUG nova.compute.manager [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1031.419399] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3951092e-a89c-43c8-9093-959c89690067 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.429089] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156fe57d-eb74-4df0-9e8f-fb7148c4a7ee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.459439] env[61998]: DEBUG nova.network.neutron [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.461629] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b09a87d-1b40-444e-8a57-8e8a17487149 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.469162] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322f5ce4-361e-42d8-a7ec-f12b838187e4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.482229] env[61998]: DEBUG nova.compute.provider_tree [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.571061] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52beb9d9-ff5e-8937-2807-fcd4cdc32804, 'name': SearchDatastore_Task, 'duration_secs': 0.00994} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.571305] env[61998]: DEBUG oslo_concurrency.lockutils [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.571565] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b/a90c4a31-8bcc-48cf-ada7-7369ab14c460-rescue.vmdk. {{(pid=61998) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1031.571824] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a72e20f6-737d-4bea-ad1a-10471e49a36d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.578175] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1031.578175] env[61998]: value = "task-1388961" [ 1031.578175] env[61998]: _type = "Task" [ 1031.578175] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.585403] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388961, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.599033] env[61998]: DEBUG nova.network.neutron [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Updating instance_info_cache with network_info: [{"id": "4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5", "address": "fa:16:3e:13:d7:e5", "network": {"id": "0465a764-ad68-4418-8f04-995d523674e9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1211841276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7647715795e34176aebe4087ee3a3b42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ac1ddd1-57", "ovs_interfaceid": "4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.988200] env[61998]: DEBUG nova.scheduler.client.report [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1032.089634] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388961, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460175} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.089916] env[61998]: INFO nova.virt.vmwareapi.ds_util [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b/a90c4a31-8bcc-48cf-ada7-7369ab14c460-rescue.vmdk. 
[ 1032.090670] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fba6c61-e1b5-4ff7-9362-026a1759169a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.107803] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Releasing lock "refresh_cache-cd7775be-954c-4117-b9a9-763bbfb325c4" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.108098] env[61998]: DEBUG nova.compute.manager [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Instance network_info: |[{"id": "4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5", "address": "fa:16:3e:13:d7:e5", "network": {"id": "0465a764-ad68-4418-8f04-995d523674e9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1211841276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7647715795e34176aebe4087ee3a3b42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ac1ddd1-57", "ovs_interfaceid": "4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1032.115348] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b/a90c4a31-8bcc-48cf-ada7-7369ab14c460-rescue.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1032.115738] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:d7:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '975b168a-03e5-449d-95ac-4d51ba027242', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.122744] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 
tempest-ListServerFiltersTestJSON-223422133-project-member] Creating folder: Project (7647715795e34176aebe4087ee3a3b42). Parent ref: group-v294665. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1032.122962] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-621bca29-94d0-4c01-a38e-64ab424144e2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.136525] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9a1e000-a2c4-4e70-8fb3-f8b7ba243d39 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.143499] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1032.143499] env[61998]: value = "task-1388963" [ 1032.143499] env[61998]: _type = "Task" [ 1032.143499] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.149074] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Created folder: Project (7647715795e34176aebe4087ee3a3b42) in parent group-v294665. [ 1032.149259] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Creating folder: Instances. Parent ref: group-v294788. {{(pid=61998) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1032.152136] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83426a09-ed30-41af-9c74-eb14d4c515f5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.153511] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388963, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.161962] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Created folder: Instances in parent group-v294788. [ 1032.162181] env[61998]: DEBUG oslo.service.loopingcall [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1032.162367] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1032.162554] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03296d88-507d-41d0-b2a0-54470a143b5d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.181664] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.181664] env[61998]: value = "task-1388965" [ 1032.181664] env[61998]: _type = "Task" [ 1032.181664] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.191611] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388965, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.327923] env[61998]: DEBUG nova.compute.manager [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Start spawning the instance on the hypervisor. {{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1032.357015] env[61998]: DEBUG nova.virt.hardware [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1032.357015] env[61998]: DEBUG nova.virt.hardware [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1032.357015] env[61998]: DEBUG nova.virt.hardware [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.357015] env[61998]: DEBUG nova.virt.hardware [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1032.357015] env[61998]: 
DEBUG nova.virt.hardware [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.357015] env[61998]: DEBUG nova.virt.hardware [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1032.357015] env[61998]: DEBUG nova.virt.hardware [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1032.357015] env[61998]: DEBUG nova.virt.hardware [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1032.357015] env[61998]: DEBUG nova.virt.hardware [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1032.357015] env[61998]: DEBUG nova.virt.hardware [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1032.357015] env[61998]: DEBUG nova.virt.hardware [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1032.357720] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198c635b-d4cf-4574-85de-b505faacac22 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.365279] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8462668c-c7cf-4533-9756-1c3ee833dc41 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.387201] env[61998]: DEBUG nova.compute.manager [req-797924b0-fb7a-4c25-9327-741260e7ad47 req-b56f3876-eefb-4a06-b588-8b25a9bffe0a service nova] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Received event network-changed-4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1032.387472] env[61998]: DEBUG nova.compute.manager [req-797924b0-fb7a-4c25-9327-741260e7ad47 req-b56f3876-eefb-4a06-b588-8b25a9bffe0a service nova] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] 
Refreshing instance network info cache due to event network-changed-4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1032.387710] env[61998]: DEBUG oslo_concurrency.lockutils [req-797924b0-fb7a-4c25-9327-741260e7ad47 req-b56f3876-eefb-4a06-b588-8b25a9bffe0a service nova] Acquiring lock "refresh_cache-cd7775be-954c-4117-b9a9-763bbfb325c4" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.387867] env[61998]: DEBUG oslo_concurrency.lockutils [req-797924b0-fb7a-4c25-9327-741260e7ad47 req-b56f3876-eefb-4a06-b588-8b25a9bffe0a service nova] Acquired lock "refresh_cache-cd7775be-954c-4117-b9a9-763bbfb325c4" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.388077] env[61998]: DEBUG nova.network.neutron [req-797924b0-fb7a-4c25-9327-741260e7ad47 req-b56f3876-eefb-4a06-b588-8b25a9bffe0a service nova] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Refreshing network info cache for port 4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1032.493016] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.190s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.493620] env[61998]: DEBUG nova.compute.manager [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Start building networks asynchronously for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1032.496496] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.075s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.498116] env[61998]: INFO nova.compute.claims [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.538052] env[61998]: DEBUG nova.compute.manager [req-f6680c34-9c78-4bba-a98f-ae1abac66b28 req-8374cc81-6d62-47b6-90d9-ba3aed109974 service nova] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Received event network-vif-plugged-7fd5361b-02d8-4989-956f-b685ccb4431b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1032.538274] env[61998]: DEBUG oslo_concurrency.lockutils [req-f6680c34-9c78-4bba-a98f-ae1abac66b28 req-8374cc81-6d62-47b6-90d9-ba3aed109974 service nova] Acquiring lock "a5785859-2a23-478d-b156-1817fbdcb313-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.538487] env[61998]: DEBUG oslo_concurrency.lockutils [req-f6680c34-9c78-4bba-a98f-ae1abac66b28 req-8374cc81-6d62-47b6-90d9-ba3aed109974 service nova] Lock "a5785859-2a23-478d-b156-1817fbdcb313-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.538703] env[61998]: DEBUG oslo_concurrency.lockutils [req-f6680c34-9c78-4bba-a98f-ae1abac66b28 req-8374cc81-6d62-47b6-90d9-ba3aed109974 service nova] Lock "a5785859-2a23-478d-b156-1817fbdcb313-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.538897] env[61998]: DEBUG nova.compute.manager [req-f6680c34-9c78-4bba-a98f-ae1abac66b28 req-8374cc81-6d62-47b6-90d9-ba3aed109974 service nova] [instance: a5785859-2a23-478d-b156-1817fbdcb313] No waiting events found dispatching network-vif-plugged-7fd5361b-02d8-4989-956f-b685ccb4431b {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1032.539081] env[61998]: WARNING nova.compute.manager [req-f6680c34-9c78-4bba-a98f-ae1abac66b28 req-8374cc81-6d62-47b6-90d9-ba3aed109974 service nova] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Received unexpected event network-vif-plugged-7fd5361b-02d8-4989-956f-b685ccb4431b for instance with vm_state building and task_state spawning. [ 1032.653148] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388963, 'name': ReconfigVM_Task, 'duration_secs': 0.340041} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.653444] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b/a90c4a31-8bcc-48cf-ada7-7369ab14c460-rescue.vmdk or device None with type thin {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1032.654266] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888a5394-7f86-480e-9cec-252842f46fae {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.679822] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-909fd2a8-7e53-42ea-967a-9395a5cff79b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.698755] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388965, 'name': CreateVM_Task, 'duration_secs': 0.332794} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.699902] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1032.700233] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1032.700233] env[61998]: value = "task-1388966" [ 1032.700233] env[61998]: _type = "Task" [ 1032.700233] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.700822] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.700985] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.701327] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1032.701597] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d5ede7f-15fc-4966-88e6-28f6f0e98b75 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.707897] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1032.707897] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]528fa7ff-d241-0665-4325-721e17eaaee1" [ 1032.707897] env[61998]: _type = "Task" [ 1032.707897] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.710861] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388966, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.718451] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528fa7ff-d241-0665-4325-721e17eaaee1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.002223] env[61998]: DEBUG nova.compute.utils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1033.005786] env[61998]: DEBUG nova.compute.manager [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Allocating IP information in the background. {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1033.005955] env[61998]: DEBUG nova.network.neutron [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1033.071046] env[61998]: DEBUG nova.policy [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aabe7469c8fc4ffbb03d40fc9f57a773', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7647715795e34176aebe4087ee3a3b42', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 1033.104097] env[61998]: DEBUG nova.network.neutron [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Successfully updated port: 7fd5361b-02d8-4989-956f-b685ccb4431b {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1033.127360] env[61998]: DEBUG nova.network.neutron [req-797924b0-fb7a-4c25-9327-741260e7ad47 req-b56f3876-eefb-4a06-b588-8b25a9bffe0a service nova] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Updated VIF entry in instance network info cache for port 4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1033.127725] env[61998]: DEBUG nova.network.neutron [req-797924b0-fb7a-4c25-9327-741260e7ad47 req-b56f3876-eefb-4a06-b588-8b25a9bffe0a service nova] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Updating instance_info_cache with network_info: [{"id": "4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5", "address": "fa:16:3e:13:d7:e5", "network": {"id": "0465a764-ad68-4418-8f04-995d523674e9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1211841276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7647715795e34176aebe4087ee3a3b42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ac1ddd1-57", "ovs_interfaceid": "4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.211277] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388966, 'name': ReconfigVM_Task, 'duration_secs': 0.283748} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.214286] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1033.214539] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d9d63e6-8c6d-4d6f-93e9-bb39058a6632 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.220921] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]528fa7ff-d241-0665-4325-721e17eaaee1, 'name': SearchDatastore_Task, 'duration_secs': 0.009109} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.221998] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.222253] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1033.222493] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.222646] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.222834] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1033.223136] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1033.223136] env[61998]: value = "task-1388967" [ 1033.223136] env[61998]: _type = "Task" [ 1033.223136] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.223320] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1477549-f55b-4591-8e18-6c610745ce5a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.232099] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388967, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.233444] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1033.233630] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1033.234287] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26fd8816-f0ae-487b-8e5f-7165d40fe984 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.238881] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1033.238881] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]520210ba-c03b-3299-d66b-ddd540833da5" [ 1033.238881] env[61998]: _type = "Task" [ 1033.238881] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.246148] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]520210ba-c03b-3299-d66b-ddd540833da5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.353219] env[61998]: DEBUG nova.network.neutron [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Successfully created port: cb32d9ba-0c75-427c-a8fa-77deb54fcb76 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1033.511766] env[61998]: DEBUG nova.compute.manager [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Start building block device mappings for instance. 
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1033.606986] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "refresh_cache-a5785859-2a23-478d-b156-1817fbdcb313" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.607161] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired lock "refresh_cache-a5785859-2a23-478d-b156-1817fbdcb313" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.607316] env[61998]: DEBUG nova.network.neutron [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1033.609707] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803ac119-01d8-4a83-b01c-35c65edb9554 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.617588] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6be71ca-b5d2-419a-ba44-c606c519ff12 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.649990] env[61998]: DEBUG oslo_concurrency.lockutils [req-797924b0-fb7a-4c25-9327-741260e7ad47 req-b56f3876-eefb-4a06-b588-8b25a9bffe0a service nova] Releasing lock "refresh_cache-cd7775be-954c-4117-b9a9-763bbfb325c4" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.651133] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80f1112-5710-4a98-a3ce-0a167b8c0eb1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.659191] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503a6ea9-7a36-4bc1-bb36-c09b0b61eace {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.672749] env[61998]: DEBUG nova.compute.provider_tree [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.734433] env[61998]: DEBUG oslo_vmware.api [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388967, 'name': PowerOnVM_Task, 'duration_secs': 0.470936} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.734663] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1033.738317] env[61998]: DEBUG nova.compute.manager [None req-77d5ce18-b3f2-4756-843f-446e4be73eeb tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1033.739330] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26beb8b5-7813-479a-9a80-41bca171da01 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.749389] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]520210ba-c03b-3299-d66b-ddd540833da5, 'name': SearchDatastore_Task, 'duration_secs': 0.00902} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.753126] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cedf7710-53b7-4a8f-9d85-f4b76bd9aab7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.758299] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1033.758299] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]525e3f38-8781-905b-e07d-5cc8b75ffb3a" [ 1033.758299] env[61998]: _type = "Task" [ 1033.758299] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.765824] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]525e3f38-8781-905b-e07d-5cc8b75ffb3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.161534] env[61998]: DEBUG nova.network.neutron [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1034.175553] env[61998]: DEBUG nova.scheduler.client.report [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1034.273232] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]525e3f38-8781-905b-e07d-5cc8b75ffb3a, 'name': SearchDatastore_Task, 'duration_secs': 0.009504} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.273528] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.273803] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] cd7775be-954c-4117-b9a9-763bbfb325c4/cd7775be-954c-4117-b9a9-763bbfb325c4.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1034.274070] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f596952-686e-458d-b4a0-3540813fda2f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.283674] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1034.283674] env[61998]: value = "task-1388968" [ 1034.283674] env[61998]: _type = "Task" [ 1034.283674] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.289402] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388968, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.337555] env[61998]: DEBUG nova.network.neutron [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Updating instance_info_cache with network_info: [{"id": "7fd5361b-02d8-4989-956f-b685ccb4431b", "address": "fa:16:3e:a3:7a:bb", "network": {"id": "0465a764-ad68-4418-8f04-995d523674e9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1211841276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7647715795e34176aebe4087ee3a3b42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fd5361b-02", "ovs_interfaceid": "7fd5361b-02d8-4989-956f-b685ccb4431b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.414247] env[61998]: DEBUG nova.compute.manager [req-d393675c-641d-49b8-b478-dd5b7474b0ed req-e2453109-c340-453a-a6af-af00ecc1516d service nova] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Received event network-changed-7fd5361b-02d8-4989-956f-b685ccb4431b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1034.414473] env[61998]: DEBUG nova.compute.manager [req-d393675c-641d-49b8-b478-dd5b7474b0ed req-e2453109-c340-453a-a6af-af00ecc1516d service nova] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Refreshing instance network info cache due to event network-changed-7fd5361b-02d8-4989-956f-b685ccb4431b. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1034.414698] env[61998]: DEBUG oslo_concurrency.lockutils [req-d393675c-641d-49b8-b478-dd5b7474b0ed req-e2453109-c340-453a-a6af-af00ecc1516d service nova] Acquiring lock "refresh_cache-a5785859-2a23-478d-b156-1817fbdcb313" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.522628] env[61998]: DEBUG nova.compute.manager [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Start spawning the instance on the hypervisor. 
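The instance_info_cache entry above is the serialized network_info model: a list of VIFs, each with a MAC (address), a devname, and a network holding subnets, each of which lists fixed IPs. A minimal sketch of walking that structure, trimmed to the fields it touches (values copied from the log):

network_info = [{
    "id": "7fd5361b-02d8-4989-956f-b685ccb4431b",
    "address": "fa:16:3e:a3:7a:bb",
    "network": {"subnets": [{"cidr": "192.168.128.0/28",
                             "ips": [{"address": "192.168.128.6",
                                      "type": "fixed"}]}]},
    "devname": "tap7fd5361b-02",
}]

for vif in network_info:
    fixed = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["devname"], vif["address"], fixed)
# tap7fd5361b-02 fa:16:3e:a3:7a:bb ['192.168.128.6']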
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1034.550565] env[61998]: DEBUG nova.virt.hardware [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1034.550850] env[61998]: DEBUG nova.virt.hardware [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1034.551030] env[61998]: DEBUG nova.virt.hardware [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1034.551225] env[61998]: DEBUG nova.virt.hardware [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1034.551381] env[61998]: DEBUG nova.virt.hardware [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1034.551536] env[61998]: DEBUG nova.virt.hardware [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1034.551760] env[61998]: DEBUG nova.virt.hardware [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1034.551924] env[61998]: DEBUG nova.virt.hardware [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1034.552113] 
env[61998]: DEBUG nova.virt.hardware [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1034.552286] env[61998]: DEBUG nova.virt.hardware [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1034.552469] env[61998]: DEBUG nova.virt.hardware [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1034.553432] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14859ed6-0949-41ec-962f-1b2e609259e5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.561931] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8562f8-fed6-4500-9b1f-7ea347fda27f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.680804] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.184s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.681403] env[61998]: DEBUG nova.compute.manager [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Start building networks asynchronously for instance. 
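The topology walk above (limits 0:0:0 meaning unset, defaults of 65536, and exactly one possible topology for one vCPU) reduces to enumerating sockets x cores x threads factorizations of the vCPU count under those limits. An illustrative re-implementation, not nova.virt.hardware's actual code:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Yield every (sockets, cores, threads) layout whose product is the
    # vCPU count and which stays under the per-dimension limits.
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

print(list(possible_topologies(1)))  # [(1, 1, 1)] -> the one topology logged
print(list(possible_topologies(4)))  # (1,1,4), (1,2,2), (1,4,1), (2,1,2), ...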
{{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1034.754320] env[61998]: DEBUG nova.compute.manager [req-049c1a99-e19e-49e7-9b2f-fa3012692811 req-5f24a7fe-dc9a-4f07-8d74-9b4aab8a15cf service nova] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Received event network-vif-plugged-cb32d9ba-0c75-427c-a8fa-77deb54fcb76 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1034.754612] env[61998]: DEBUG oslo_concurrency.lockutils [req-049c1a99-e19e-49e7-9b2f-fa3012692811 req-5f24a7fe-dc9a-4f07-8d74-9b4aab8a15cf service nova] Acquiring lock "79a15d76-acc3-465d-9ab7-fa61a894affd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.754893] env[61998]: DEBUG oslo_concurrency.lockutils [req-049c1a99-e19e-49e7-9b2f-fa3012692811 req-5f24a7fe-dc9a-4f07-8d74-9b4aab8a15cf service nova] Lock "79a15d76-acc3-465d-9ab7-fa61a894affd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.755522] env[61998]: DEBUG oslo_concurrency.lockutils [req-049c1a99-e19e-49e7-9b2f-fa3012692811 req-5f24a7fe-dc9a-4f07-8d74-9b4aab8a15cf service nova] Lock "79a15d76-acc3-465d-9ab7-fa61a894affd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.755915] env[61998]: DEBUG nova.compute.manager [req-049c1a99-e19e-49e7-9b2f-fa3012692811 req-5f24a7fe-dc9a-4f07-8d74-9b4aab8a15cf service nova] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] No waiting events found dispatching network-vif-plugged-cb32d9ba-0c75-427c-a8fa-77deb54fcb76 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1034.756159] env[61998]: WARNING nova.compute.manager [req-049c1a99-e19e-49e7-9b2f-fa3012692811 req-5f24a7fe-dc9a-4f07-8d74-9b4aab8a15cf service nova] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Received unexpected event network-vif-plugged-cb32d9ba-0c75-427c-a8fa-77deb54fcb76 for instance with vm_state building and task_state spawning. [ 1034.793065] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388968, 'name': CopyVirtualDisk_Task} progress is 51%. 
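The pop_instance_event sequence above (take the per-instance events lock, look for a waiter, warn on an unexpected event) is the receive side of a prepare-then-wait handshake: the spawn path registers interest in network-vif-plugged before plugging the VIF, and the Neutron-triggered event either wakes that waiter or, as here, arrives while nothing is waiting yet. A hedged sketch of such a registry; names are illustrative, not nova's implementation:

import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # Called by the spawn path *before* triggering the external action.
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        # Called when the external event arrives from the network service.
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('WARNING: received unexpected event %s' % event_name)
            return
        ev.set()  # wakes whoever is blocked on ev.wait()

events = InstanceEvents()
waiter = events.prepare('79a15d76-acc3-465d-9ab7-fa61a894affd',
                        'network-vif-plugged-cb32d9ba')
events.pop_instance_event('79a15d76-acc3-465d-9ab7-fa61a894affd',
                          'network-vif-plugged-cb32d9ba')
assert waiter.is_set()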
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.840442] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Releasing lock "refresh_cache-a5785859-2a23-478d-b156-1817fbdcb313" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.840810] env[61998]: DEBUG nova.compute.manager [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Instance network_info: |[{"id": "7fd5361b-02d8-4989-956f-b685ccb4431b", "address": "fa:16:3e:a3:7a:bb", "network": {"id": "0465a764-ad68-4418-8f04-995d523674e9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1211841276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7647715795e34176aebe4087ee3a3b42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fd5361b-02", "ovs_interfaceid": "7fd5361b-02d8-4989-956f-b685ccb4431b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1034.841175] env[61998]: DEBUG oslo_concurrency.lockutils [req-d393675c-641d-49b8-b478-dd5b7474b0ed req-e2453109-c340-453a-a6af-af00ecc1516d service nova] Acquired lock "refresh_cache-a5785859-2a23-478d-b156-1817fbdcb313" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.841367] env[61998]: DEBUG nova.network.neutron [req-d393675c-641d-49b8-b478-dd5b7474b0ed req-e2453109-c340-453a-a6af-af00ecc1516d service nova] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Refreshing network info cache for port 7fd5361b-02d8-4989-956f-b685ccb4431b {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.842721] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:7a:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '975b168a-03e5-449d-95ac-4d51ba027242', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7fd5361b-02d8-4989-956f-b685ccb4431b', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.857278] env[61998]: DEBUG oslo.service.loopingcall [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 
tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.858174] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1034.858533] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38771ade-d3e4-4ef9-b7ac-865d5c1419ce {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.883986] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.883986] env[61998]: value = "task-1388969" [ 1034.883986] env[61998]: _type = "Task" [ 1034.883986] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.892943] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388969, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.136592] env[61998]: INFO nova.compute.manager [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Unrescuing [ 1035.136933] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "refresh_cache-9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.137142] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquired lock "refresh_cache-9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.137751] env[61998]: DEBUG nova.network.neutron [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.186534] env[61998]: DEBUG nova.compute.utils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1035.188010] env[61998]: DEBUG nova.compute.manager [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Allocating IP information in the background. 
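The "Using /dev/sd instead of None" entry above comes from next-device-name selection: when a block device mapping carries no device name, one is generated from the /dev/sd prefix by taking the first unused letter. A minimal single-letter sketch under that assumption (the real helper also handles multi-letter names like sdaa):

import string

def get_next_device_name(used, prefix='/dev/sd'):
    # Collect the suffix letters already taken under this prefix.
    used_letters = {name[len(prefix):] for name in used
                    if name.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in used_letters:
            return prefix + letter
    raise ValueError('no free device names under %s' % prefix)

print(get_next_device_name(['/dev/sda', '/dev/sdb']))  # /dev/sdc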
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1035.189193] env[61998]: DEBUG nova.network.neutron [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1035.232681] env[61998]: DEBUG nova.policy [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3066202e35b643d1b6d3f2d8b4d724ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e479b6ac56f464fbc86574f776cd96c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 1035.293643] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388968, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.655961} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.294084] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] cd7775be-954c-4117-b9a9-763bbfb325c4/cd7775be-954c-4117-b9a9-763bbfb325c4.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1035.294343] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1035.294550] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-367927fa-09da-4684-8f9f-077030586987 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.302081] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1035.302081] env[61998]: value = "task-1388970" [ 1035.302081] env[61998]: _type = "Task" [ 1035.302081] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.311694] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388970, 'name': ExtendVirtualDisk_Task} progress is 0%. 
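The policy entry above shows an authorize call failing for network:attach_external_network with a member-role token, which is the expected outcome for a non-admin tenant. A sketch of the same kind of check through oslo.policy's public Enforcer; the 'role:admin' default used here is an assumption for illustration, not the deployment's actual rule:

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

# Credential fields copied from the log entry above.
creds = {'user_id': '3066202e35b643d1b6d3f2d8b4d724ec',
         'project_id': '1e479b6ac56f464fbc86574f776cd96c',
         'roles': ['reader', 'member']}

# Returns False for a member-only token, matching the logged outcome.
print(enforcer.enforce('network:attach_external_network', {}, creds))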
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.333649] env[61998]: DEBUG nova.network.neutron [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Successfully updated port: cb32d9ba-0c75-427c-a8fa-77deb54fcb76 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1035.362723] env[61998]: DEBUG nova.compute.manager [req-328c63e6-f757-4d31-b40b-e9d58796be93 req-9af14860-feeb-41d9-9a26-37bdd60a6308 service nova] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Received event network-changed-cb32d9ba-0c75-427c-a8fa-77deb54fcb76 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1035.362972] env[61998]: DEBUG nova.compute.manager [req-328c63e6-f757-4d31-b40b-e9d58796be93 req-9af14860-feeb-41d9-9a26-37bdd60a6308 service nova] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Refreshing instance network info cache due to event network-changed-cb32d9ba-0c75-427c-a8fa-77deb54fcb76. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1035.363283] env[61998]: DEBUG oslo_concurrency.lockutils [req-328c63e6-f757-4d31-b40b-e9d58796be93 req-9af14860-feeb-41d9-9a26-37bdd60a6308 service nova] Acquiring lock "refresh_cache-79a15d76-acc3-465d-9ab7-fa61a894affd" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.363510] env[61998]: DEBUG oslo_concurrency.lockutils [req-328c63e6-f757-4d31-b40b-e9d58796be93 req-9af14860-feeb-41d9-9a26-37bdd60a6308 service nova] Acquired lock "refresh_cache-79a15d76-acc3-465d-9ab7-fa61a894affd" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.363745] env[61998]: DEBUG nova.network.neutron [req-328c63e6-f757-4d31-b40b-e9d58796be93 req-9af14860-feeb-41d9-9a26-37bdd60a6308 service nova] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Refreshing network info cache for port cb32d9ba-0c75-427c-a8fa-77deb54fcb76 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1035.395948] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388969, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.685957] env[61998]: DEBUG nova.network.neutron [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Successfully created port: 723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1035.688852] env[61998]: DEBUG nova.network.neutron [req-d393675c-641d-49b8-b478-dd5b7474b0ed req-e2453109-c340-453a-a6af-af00ecc1516d service nova] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Updated VIF entry in instance network info cache for port 7fd5361b-02d8-4989-956f-b685ccb4431b. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1035.689040] env[61998]: DEBUG nova.network.neutron [req-d393675c-641d-49b8-b478-dd5b7474b0ed req-e2453109-c340-453a-a6af-af00ecc1516d service nova] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Updating instance_info_cache with network_info: [{"id": "7fd5361b-02d8-4989-956f-b685ccb4431b", "address": "fa:16:3e:a3:7a:bb", "network": {"id": "0465a764-ad68-4418-8f04-995d523674e9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1211841276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7647715795e34176aebe4087ee3a3b42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fd5361b-02", "ovs_interfaceid": "7fd5361b-02d8-4989-956f-b685ccb4431b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.692023] env[61998]: DEBUG nova.compute.manager [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1035.814199] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388970, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.212152} completed successfully. 
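The ExtendVirtualDisk_Task that just completed grows the copied root disk to the flavor's root_gb; the 1048576 logged earlier with "Extending root virtual disk" appears to be that 1 GiB expressed in KiB, the unit the disk-manager call takes here:

root_gb = 1                          # m1.micro's root_gb from the flavor dump
requested_size_kb = root_gb * 1024 * 1024
assert requested_size_kb == 1048576  # matches the logged target size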
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.814495] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1035.815380] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3679912b-6203-4412-9729-44dd38ed950a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.839464] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] cd7775be-954c-4117-b9a9-763bbfb325c4/cd7775be-954c-4117-b9a9-763bbfb325c4.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1035.839464] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9e9d5da-6cb5-4228-b449-c4a0acef2ce7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.852509] env[61998]: DEBUG nova.network.neutron [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Updating instance_info_cache with network_info: [{"id": "c12a8246-774e-4a6e-8ff1-29a553b5f0ee", "address": "fa:16:3e:d7:94:a9", "network": {"id": "eb61f6e6-e4f1-4abc-9153-1b2f61641b6e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1941982145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f2b8a39c23bc46008370ed877054464e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc12a8246-77", "ovs_interfaceid": "c12a8246-774e-4a6e-8ff1-29a553b5f0ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.854132] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "refresh_cache-79a15d76-acc3-465d-9ab7-fa61a894affd" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.863242] env[61998]: DEBUG oslo_vmware.api [None 
req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1035.863242] env[61998]: value = "task-1388971" [ 1035.863242] env[61998]: _type = "Task" [ 1035.863242] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.874934] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388971, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.896634] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388969, 'name': CreateVM_Task, 'duration_secs': 0.516536} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.897552] env[61998]: DEBUG nova.network.neutron [req-328c63e6-f757-4d31-b40b-e9d58796be93 req-9af14860-feeb-41d9-9a26-37bdd60a6308 service nova] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1035.899337] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1035.900055] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.900255] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.900654] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1035.901536] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d17efb2c-314a-4d00-9f86-896b885a72a3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.906702] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1035.906702] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52ac4e0e-9420-9906-7054-9c9352eb0bc5" [ 1035.906702] env[61998]: _type = "Task" [ 1035.906702] env[61998]: } to complete. 
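The Acquiring/Acquired lock entries above around the devstack-image-cache_base path show oslo.concurrency serializing access to a shared cached image: each consumer locks on a name derived from the datastore path, and the "external semaphore" line additionally coordinates across processes. A minimal sketch using the public lockutils helper, with the lock name copied from the log:

from oslo_concurrency import lockutils

cache_image = ('[datastore2] devstack-image-cache_base/'
               'a90c4a31-8bcc-48cf-ada7-7369ab14c460')

# In-process serialization, as in the Acquiring/Acquired/Releasing lines;
# the wrapper also records the waited/held durations seen in the log.
with lockutils.lock(cache_image):
    pass  # fetch or copy the cached image while holding the lock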
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.915062] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52ac4e0e-9420-9906-7054-9c9352eb0bc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.979088] env[61998]: DEBUG nova.network.neutron [req-328c63e6-f757-4d31-b40b-e9d58796be93 req-9af14860-feeb-41d9-9a26-37bdd60a6308 service nova] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.191791] env[61998]: DEBUG oslo_concurrency.lockutils [req-d393675c-641d-49b8-b478-dd5b7474b0ed req-e2453109-c340-453a-a6af-af00ecc1516d service nova] Releasing lock "refresh_cache-a5785859-2a23-478d-b156-1817fbdcb313" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.355817] env[61998]: DEBUG oslo_concurrency.lockutils [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Releasing lock "refresh_cache-9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.356642] env[61998]: DEBUG nova.objects.instance [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lazy-loading 'flavor' on Instance uuid 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1036.373588] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388971, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.417834] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52ac4e0e-9420-9906-7054-9c9352eb0bc5, 'name': SearchDatastore_Task, 'duration_secs': 0.011978} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.418166] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.418459] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.418723] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.418873] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.419075] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.419354] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af811f0e-7eaf-47cc-9669-1416f05487f0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.428446] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.428691] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Folder [datastore2] devstack-image-cache_base created. 
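The MakeDirectory call and the "Folder ... created" pair above reflect an idempotent create: issue the mkdir unconditionally and treat "already exists" as success, which avoids a check-then-create race when several concurrent spawns share the image cache directory. A thin sketch of the idiom; the exception type is illustrative, standing in for the fault vCenter raises:

class FileAlreadyExistsException(Exception):
    """Stand-in for the duplicate-path fault returned by the server."""

def mkdir_if_missing(mkdir, path):
    # mkdir is assumed to raise FileAlreadyExistsException on duplicates.
    try:
        mkdir(path)
        print('Created directory with path %s' % path)
    except FileAlreadyExistsException:
        print('Folder %s already exists' % path)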
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1036.429409] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93f6e152-482d-44a3-ba16-2ade1c2a6fa0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.435290] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1036.435290] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52aa7cfb-2f8b-725d-9a38-a12ee19c0c80" [ 1036.435290] env[61998]: _type = "Task" [ 1036.435290] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.444739] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52aa7cfb-2f8b-725d-9a38-a12ee19c0c80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.481666] env[61998]: DEBUG oslo_concurrency.lockutils [req-328c63e6-f757-4d31-b40b-e9d58796be93 req-9af14860-feeb-41d9-9a26-37bdd60a6308 service nova] Releasing lock "refresh_cache-79a15d76-acc3-465d-9ab7-fa61a894affd" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.482093] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired lock "refresh_cache-79a15d76-acc3-465d-9ab7-fa61a894affd" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.482358] env[61998]: DEBUG nova.network.neutron [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1036.701379] env[61998]: DEBUG nova.compute.manager [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1036.726460] env[61998]: DEBUG nova.virt.hardware [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1036.726745] env[61998]: DEBUG nova.virt.hardware [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1036.726905] env[61998]: DEBUG nova.virt.hardware [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1036.727104] env[61998]: DEBUG nova.virt.hardware [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1036.727260] env[61998]: DEBUG nova.virt.hardware [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1036.727410] env[61998]: DEBUG nova.virt.hardware [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1036.727621] env[61998]: DEBUG nova.virt.hardware [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1036.727781] env[61998]: DEBUG nova.virt.hardware [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1036.727951] 
env[61998]: DEBUG nova.virt.hardware [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1036.728131] env[61998]: DEBUG nova.virt.hardware [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1036.728305] env[61998]: DEBUG nova.virt.hardware [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1036.729197] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b413ef7e-5e6f-4006-a593-117ef313035e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.737482] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358c18cf-2386-4b73-abba-de804a098c8b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.862743] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e250e7-1458-480c-9c4c-5cbc6e5ce243 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.874435] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388971, 'name': ReconfigVM_Task, 'duration_secs': 0.610034} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.890562] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Reconfigured VM instance instance-00000064 to attach disk [datastore2] cd7775be-954c-4117-b9a9-763bbfb325c4/cd7775be-954c-4117-b9a9-763bbfb325c4.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.891303] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.891657] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-68d2ce53-a33c-45fc-a347-bdce95f2c150 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.893291] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18a3a10b-4a12-4742-9c55-fa3e209f15b4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.901016] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1036.901016] env[61998]: value = "task-1388973" [ 1036.901016] env[61998]: _type = "Task" [ 1036.901016] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.902357] env[61998]: DEBUG oslo_vmware.api [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1036.902357] env[61998]: value = "task-1388972" [ 1036.902357] env[61998]: _type = "Task" [ 1036.902357] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.916376] env[61998]: DEBUG oslo_vmware.api [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388972, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.919341] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388973, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.946243] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52aa7cfb-2f8b-725d-9a38-a12ee19c0c80, 'name': SearchDatastore_Task, 'duration_secs': 0.010912} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.947183] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc621cd6-aed4-42b8-b8da-a10991269229 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.954482] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1036.954482] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52cb9f40-62d6-621d-a43d-544deefb2531" [ 1036.954482] env[61998]: _type = "Task" [ 1036.954482] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.964784] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52cb9f40-62d6-621d-a43d-544deefb2531, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.014030] env[61998]: DEBUG nova.network.neutron [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1037.156716] env[61998]: DEBUG nova.network.neutron [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Updating instance_info_cache with network_info: [{"id": "cb32d9ba-0c75-427c-a8fa-77deb54fcb76", "address": "fa:16:3e:80:d0:82", "network": {"id": "0465a764-ad68-4418-8f04-995d523674e9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1211841276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7647715795e34176aebe4087ee3a3b42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb32d9ba-0c", "ovs_interfaceid": "cb32d9ba-0c75-427c-a8fa-77deb54fcb76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.234686] env[61998]: DEBUG nova.network.neutron [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: 
dde42f82-2616-43f0-a709-d6a63a63dd0d] Successfully updated port: 723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1037.385847] env[61998]: DEBUG nova.compute.manager [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Received event network-vif-plugged-723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1037.386150] env[61998]: DEBUG oslo_concurrency.lockutils [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] Acquiring lock "dde42f82-2616-43f0-a709-d6a63a63dd0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.386330] env[61998]: DEBUG oslo_concurrency.lockutils [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] Lock "dde42f82-2616-43f0-a709-d6a63a63dd0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.386528] env[61998]: DEBUG oslo_concurrency.lockutils [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] Lock "dde42f82-2616-43f0-a709-d6a63a63dd0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.386706] env[61998]: DEBUG nova.compute.manager [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] No waiting events found dispatching network-vif-plugged-723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1037.386877] env[61998]: WARNING nova.compute.manager [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Received unexpected event network-vif-plugged-723ec6af-ec74-4c82-ae7c-4795b74d6aad for instance with vm_state building and task_state spawning. [ 1037.387078] env[61998]: DEBUG nova.compute.manager [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Received event network-changed-723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1037.387242] env[61998]: DEBUG nova.compute.manager [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Refreshing instance network info cache due to event network-changed-723ec6af-ec74-4c82-ae7c-4795b74d6aad. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1037.387427] env[61998]: DEBUG oslo_concurrency.lockutils [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] Acquiring lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.387593] env[61998]: DEBUG oslo_concurrency.lockutils [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] Acquired lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.387762] env[61998]: DEBUG nova.network.neutron [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Refreshing network info cache for port 723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1037.415076] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388973, 'name': Rename_Task, 'duration_secs': 0.180418} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.417984] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.418267] env[61998]: DEBUG oslo_vmware.api [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388972, 'name': PowerOffVM_Task, 'duration_secs': 0.225839} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.418502] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec16ec47-ff1c-43e1-9e7a-9d96d9e3dbf2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.420023] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1037.425589] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Reconfiguring VM instance instance-00000063 to detach disk 2001 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1037.426099] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6cbf2e4b-9f7d-4a38-9d0a-806d4dd1bdda {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.446271] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1037.446271] env[61998]: value = "task-1388974" [ 1037.446271] env[61998]: _type = "Task" [ 1037.446271] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.446585] env[61998]: DEBUG oslo_vmware.api [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1037.446585] env[61998]: value = "task-1388975" [ 1037.446585] env[61998]: _type = "Task" [ 1037.446585] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.457522] env[61998]: DEBUG oslo_vmware.api [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388975, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.463459] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388974, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.469412] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52cb9f40-62d6-621d-a43d-544deefb2531, 'name': SearchDatastore_Task, 'duration_secs': 0.010765} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.469665] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.469974] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] a5785859-2a23-478d-b156-1817fbdcb313/a5785859-2a23-478d-b156-1817fbdcb313.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1037.470323] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-239df9ed-f8e3-482d-bdcd-b2838cccb124 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.478170] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1037.478170] env[61998]: value = "task-1388976" [ 1037.478170] env[61998]: _type = "Task" [ 1037.478170] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.487472] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388976, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.659722] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Releasing lock "refresh_cache-79a15d76-acc3-465d-9ab7-fa61a894affd" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.660115] env[61998]: DEBUG nova.compute.manager [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Instance network_info: |[{"id": "cb32d9ba-0c75-427c-a8fa-77deb54fcb76", "address": "fa:16:3e:80:d0:82", "network": {"id": "0465a764-ad68-4418-8f04-995d523674e9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1211841276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7647715795e34176aebe4087ee3a3b42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb32d9ba-0c", "ovs_interfaceid": "cb32d9ba-0c75-427c-a8fa-77deb54fcb76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1037.660639] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:d0:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '975b168a-03e5-449d-95ac-4d51ba027242', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb32d9ba-0c75-427c-a8fa-77deb54fcb76', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1037.669474] env[61998]: DEBUG oslo.service.loopingcall [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1037.670169] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1037.670461] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec40031f-f812-4b77-af86-3febd0c5f794 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.694348] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1037.694348] env[61998]: value = "task-1388977" [ 1037.694348] env[61998]: _type = "Task" [ 1037.694348] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.703687] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388977, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.737755] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.920246] env[61998]: DEBUG nova.network.neutron [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Instance cache missing network info. {{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1037.965838] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388974, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.966155] env[61998]: DEBUG oslo_vmware.api [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388975, 'name': ReconfigVM_Task, 'duration_secs': 0.267852} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.969062] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Reconfigured VM instance instance-00000063 to detach disk 2001 {{(pid=61998) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1037.969062] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.969062] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea082013-18f7-4876-a2f5-1a04d24fc390 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.976596] env[61998]: DEBUG oslo_vmware.api [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1037.976596] env[61998]: value = "task-1388978" [ 1037.976596] env[61998]: _type = "Task" [ 1037.976596] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.988184] env[61998]: DEBUG oslo_vmware.api [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388978, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.991395] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388976, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495921} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.991638] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] a5785859-2a23-478d-b156-1817fbdcb313/a5785859-2a23-478d-b156-1817fbdcb313.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1037.991876] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1037.992118] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b723278-d71a-4931-a103-57e0fc58fd6e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.999778] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1037.999778] env[61998]: value = "task-1388979" [ 1037.999778] env[61998]: _type = "Task" [ 1037.999778] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.009181] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388979, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.010352] env[61998]: DEBUG nova.network.neutron [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.203989] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388977, 'name': CreateVM_Task, 'duration_secs': 0.481887} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.204206] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1038.204841] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.205027] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.205364] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1038.205625] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-185f4ec9-58fc-49ba-ac03-4b533028e19b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.210503] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1038.210503] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52cd8ac5-19e5-9271-fe98-55e47a30a2af" [ 1038.210503] env[61998]: _type = "Task" [ 1038.210503] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.218334] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52cd8ac5-19e5-9271-fe98-55e47a30a2af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.457419] env[61998]: DEBUG oslo_vmware.api [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388974, 'name': PowerOnVM_Task, 'duration_secs': 0.724153} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.457687] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1038.457892] env[61998]: INFO nova.compute.manager [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Took 8.32 seconds to spawn the instance on the hypervisor. [ 1038.458090] env[61998]: DEBUG nova.compute.manager [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1038.458882] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc448a92-5879-4480-acf7-0e983aa37847 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.485458] env[61998]: DEBUG oslo_vmware.api [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388978, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.510631] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388979, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078314} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.510867] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1038.511658] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c103122-3be3-4ae0-b9ef-96d9b726eb2c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.514583] env[61998]: DEBUG oslo_concurrency.lockutils [req-2edbcac0-8603-4354-a507-c94497283652 req-a6ab5d01-328d-4e97-b1f2-12aa215c0df3 service nova] Releasing lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.515161] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.515320] env[61998]: DEBUG nova.network.neutron [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1038.537083] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] a5785859-2a23-478d-b156-1817fbdcb313/a5785859-2a23-478d-b156-1817fbdcb313.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.537617] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8219f5a8-be41-4649-a847-9a3690b6138f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.558785] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1038.558785] env[61998]: value = "task-1388980" [ 1038.558785] env[61998]: _type = "Task" [ 1038.558785] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.570137] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388980, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.722306] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52cd8ac5-19e5-9271-fe98-55e47a30a2af, 'name': SearchDatastore_Task, 'duration_secs': 0.024558} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.722649] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.722858] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1038.723114] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.723266] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.723448] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.723713] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-211c887a-b31d-494f-8665-ebcfd6a519d7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.732842] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.733034] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1038.733748] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-379e2d25-ea36-4e6a-8917-251d41154457 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.739160] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1038.739160] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52b6b177-705a-3a96-9bc3-ea0af3f077b7" [ 1038.739160] env[61998]: _type = "Task" [ 1038.739160] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.748701] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b6b177-705a-3a96-9bc3-ea0af3f077b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.974578] env[61998]: INFO nova.compute.manager [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Took 15.50 seconds to build instance. [ 1038.987059] env[61998]: DEBUG oslo_vmware.api [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388978, 'name': PowerOnVM_Task, 'duration_secs': 0.636769} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.987340] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1038.987588] env[61998]: DEBUG nova.compute.manager [None req-9ccaa8de-882d-400c-a356-bd4935bae0cd tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1038.988459] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e25587f-6a0a-47c2-9e2b-d0d4e30b5096 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.050454] env[61998]: DEBUG nova.network.neutron [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1039.070937] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388980, 'name': ReconfigVM_Task, 'duration_secs': 0.294392} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.071285] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Reconfigured VM instance instance-00000065 to attach disk [datastore2] a5785859-2a23-478d-b156-1817fbdcb313/a5785859-2a23-478d-b156-1817fbdcb313.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.071924] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21a7781a-5af2-44e3-9f32-df874099ca1d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.080867] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1039.080867] env[61998]: value = "task-1388981" [ 1039.080867] env[61998]: _type = "Task" [ 1039.080867] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.086759] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388981, 'name': Rename_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.228501] env[61998]: DEBUG nova.network.neutron [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updating instance_info_cache with network_info: [{"id": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "address": "fa:16:3e:23:8d:43", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723ec6af-ec", "ovs_interfaceid": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.251847] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52b6b177-705a-3a96-9bc3-ea0af3f077b7, 'name': SearchDatastore_Task, 'duration_secs': 0.00958} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.252699] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bf4a3da-c746-4b34-b1ac-2c8deacde5a2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.259130] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1039.259130] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52d9b698-1e8a-009d-4906-789a102efa9e" [ 1039.259130] env[61998]: _type = "Task" [ 1039.259130] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.268027] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52d9b698-1e8a-009d-4906-789a102efa9e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.476650] env[61998]: DEBUG oslo_concurrency.lockutils [None req-41b2904e-5c1f-4532-9e21-d5a6ee6d46f6 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "cd7775be-954c-4117-b9a9-763bbfb325c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.016s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.589205] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388981, 'name': Rename_Task, 'duration_secs': 0.202951} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.589446] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1039.589732] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e6a3bd9-8ceb-4822-9a63-906173b48056 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.597325] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1039.597325] env[61998]: value = "task-1388982" [ 1039.597325] env[61998]: _type = "Task" [ 1039.597325] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.605702] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388982, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.731087] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.731446] env[61998]: DEBUG nova.compute.manager [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Instance network_info: |[{"id": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "address": "fa:16:3e:23:8d:43", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723ec6af-ec", "ovs_interfaceid": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1039.731896] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:8d:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92f3cfd6-c130-4390-8910-865fbc42afd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '723ec6af-ec74-4c82-ae7c-4795b74d6aad', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1039.739485] env[61998]: DEBUG oslo.service.loopingcall [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1039.739703] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1039.739931] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1aff0a2-8146-4913-81e0-622e84fd2843 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.761358] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1039.761358] env[61998]: value = "task-1388983" [ 1039.761358] env[61998]: _type = "Task" [ 1039.761358] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.774672] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52d9b698-1e8a-009d-4906-789a102efa9e, 'name': SearchDatastore_Task, 'duration_secs': 0.011298} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.777915] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.778208] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 79a15d76-acc3-465d-9ab7-fa61a894affd/79a15d76-acc3-465d-9ab7-fa61a894affd.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1039.778512] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388983, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.778729] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-692edc57-d9e5-41fb-a296-e8c5902d080b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.786642] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1039.786642] env[61998]: value = "task-1388984" [ 1039.786642] env[61998]: _type = "Task" [ 1039.786642] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.796655] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388984, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.828397] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.828880] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.829226] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.829514] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.829815] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.832540] env[61998]: INFO nova.compute.manager [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Terminating instance [ 1039.834875] env[61998]: DEBUG nova.compute.manager [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 1039.835158] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1039.836244] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acbf38e6-8c2d-430d-98f2-5cd0b5a83a20 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.845014] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.845668] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d6ff120-298a-484d-9ec9-c4b8e9651818 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.853319] env[61998]: DEBUG oslo_vmware.api [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1039.853319] env[61998]: value = "task-1388985" [ 1039.853319] env[61998]: _type = "Task" [ 1039.853319] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.863319] env[61998]: DEBUG oslo_vmware.api [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388985, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.108873] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388982, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.276345] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388983, 'name': CreateVM_Task} progress is 25%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.298103] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388984, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.364551] env[61998]: DEBUG oslo_vmware.api [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388985, 'name': PowerOffVM_Task, 'duration_secs': 0.209841} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.364768] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.364941] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.365212] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-974338c1-fc32-42e7-bb5e-253ae771749d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.459012] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.459287] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.459473] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Deleting the datastore file [datastore1] 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.459755] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abcd535c-50af-4a36-86ed-1d9fcfc0f72d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.466962] env[61998]: DEBUG oslo_vmware.api [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){ [ 1040.466962] env[61998]: value = "task-1388987" [ 1040.466962] env[61998]: _type = "Task" [ 1040.466962] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.475420] env[61998]: DEBUG oslo_vmware.api [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388987, 'name': DeleteDatastoreFile_Task} progress is 0%. 
[ 1040.608716] env[61998]: DEBUG oslo_vmware.api [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388982, 'name': PowerOnVM_Task, 'duration_secs': 0.51286} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1040.608973] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1040.609194] env[61998]: INFO nova.compute.manager [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Took 8.28 seconds to spawn the instance on the hypervisor.
[ 1040.609413] env[61998]: DEBUG nova.compute.manager [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}}
[ 1040.610189] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5182b6-59e0-46dd-a4e4-5f8e77b79100 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1040.775645] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1388983, 'name': CreateVM_Task, 'duration_secs': 0.682652} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1040.775956] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 1040.776633] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1040.776897] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1040.777201] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1040.777477] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3119b709-2c61-4400-a85a-04c273608bfb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1040.783049] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){
[ 1040.783049] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52c65e94-420b-2373-f9e9-cde5b822c4c6"
[ 1040.783049] env[61998]: _type = "Task"
[ 1040.783049] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1040.794774] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c65e94-420b-2373-f9e9-cde5b822c4c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1040.800133] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388984, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514602} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
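Editor's note: the lock/semaphore pair around the devstack-image-cache_base entry serializes access to one cached image, so concurrent spawns on the same host do not fetch or mutate the same VMDK twice. A sketch of the pattern using oslo.concurrency's lock context manager (datastore_search and copy_from_glance are hypothetical stand-ins for the real calls):

```python
from oslo_concurrency import lockutils

CACHE_KEY = "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460"


def ensure_cached_image(datastore_search, copy_from_glance):
    """Serialize work on a single image-cache entry.

    external=True adds the file-based semaphore seen in the log, so other
    worker processes on the same host also queue on this entry.
    """
    with lockutils.lock(CACHE_KEY, external=True):
        if not datastore_search(CACHE_KEY):   # SearchDatastore_Task: is it cached?
            copy_from_glance(CACHE_KEY)       # only the first requester downloads
```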
[ 1040.800385] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] 79a15d76-acc3-465d-9ab7-fa61a894affd/79a15d76-acc3-465d-9ab7-fa61a894affd.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 1040.800645] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 1040.800849] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc5b9c43-2832-44f2-96a2-ce6292fbbfcf {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1040.807614] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){
[ 1040.807614] env[61998]: value = "task-1388988"
[ 1040.807614] env[61998]: _type = "Task"
[ 1040.807614] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1040.816539] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388988, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1040.977086] env[61998]: DEBUG oslo_vmware.api [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165949} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1040.977384] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1040.977582] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1040.977764] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1040.977938] env[61998]: INFO nova.compute.manager [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Took 1.14 seconds to destroy the instance on the hypervisor.
[ 1040.978382] env[61998]: DEBUG oslo.service.loopingcall [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1040.978457] env[61998]: DEBUG nova.compute.manager [-] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 1040.978532] env[61998]: DEBUG nova.network.neutron [-] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1041.126902] env[61998]: INFO nova.compute.manager [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Took 17.39 seconds to build instance.
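Editor's note: the teardown of instance 9c8d8d6d above follows a fixed order, each step a precondition for the next. A sketch of that order with hypothetical helper objects (the real code spans vmops.destroy and ComputeManager._shutdown_instance):

```python
def destroy_instance(vm, datastore, network_api, instance):
    """Teardown order visible in the log; vm/datastore/network_api are
    hypothetical stand-ins. A registered, running VM holds its files open,
    and Neutron ports are only released once the VM is gone."""
    vm.power_off()                       # PowerOffVM_Task
    vm.unregister()                      # UnregisterVM removes the vCenter entry
    datastore.delete_dir(instance.uuid)  # DeleteDatastoreFile_Task drops disks/config
    network_api.deallocate_for_instance(instance)  # then free the Neutron ports
```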
[ 1041.234904] env[61998]: DEBUG nova.compute.manager [req-77d24a60-6e5e-4e3b-8ec6-093668c63523 req-923e62e6-1b65-4f12-8f6f-6191cb52a59b service nova] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Received event network-vif-deleted-c12a8246-774e-4a6e-8ff1-29a553b5f0ee {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 1041.235216] env[61998]: INFO nova.compute.manager [req-77d24a60-6e5e-4e3b-8ec6-093668c63523 req-923e62e6-1b65-4f12-8f6f-6191cb52a59b service nova] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Neutron deleted interface c12a8246-774e-4a6e-8ff1-29a553b5f0ee; detaching it from the instance and deleting it from the info cache
[ 1041.235509] env[61998]: DEBUG nova.network.neutron [req-77d24a60-6e5e-4e3b-8ec6-093668c63523 req-923e62e6-1b65-4f12-8f6f-6191cb52a59b service nova] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1041.295513] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c65e94-420b-2373-f9e9-cde5b822c4c6, 'name': SearchDatastore_Task, 'duration_secs': 0.01541} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1041.295842] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1041.296099] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1041.296471] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1041.296639] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1041.296824] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1041.297114] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03a9be41-952e-4b2c-a515-a6c4a824ab28 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1041.317834] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388988, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071209} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1041.319188] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 1041.319369] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1041.319436] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 1041.320547] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35161921-1802-453e-bb98-f4f74c06b037 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1041.322941] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e8d0342-6872-4100-9e30-2efb4c48a282 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1041.328379] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){
[ 1041.328379] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]527907e2-924a-8831-6344-7812aa51afa2"
[ 1041.328379] env[61998]: _type = "Task"
[ 1041.328379] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1041.346102] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 79a15d76-acc3-465d-9ab7-fa61a894affd/79a15d76-acc3-465d-9ab7-fa61a894affd.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1041.349175] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32d64b16-fe16-4546-897e-f57b42f725e9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1041.370282] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]527907e2-924a-8831-6344-7812aa51afa2, 'name': SearchDatastore_Task, 'duration_secs': 0.009991} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1041.372154] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){
[ 1041.372154] env[61998]: value = "task-1388989"
[ 1041.372154] env[61998]: _type = "Task"
[ 1041.372154] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1041.372361] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60f042ec-01c0-4106-8656-d07ce061a5ff {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1041.381312] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){
[ 1041.381312] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]523a7b7a-4e72-7942-c6d6-f90607dbdb75"
[ 1041.381312] env[61998]: _type = "Task"
[ 1041.381312] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1041.384850] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388989, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1041.393719] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]523a7b7a-4e72-7942-c6d6-f90607dbdb75, 'name': SearchDatastore_Task, 'duration_secs': 0.010271} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1041.393980] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1041.394269] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] dde42f82-2616-43f0-a709-d6a63a63dd0d/dde42f82-2616-43f0-a709-d6a63a63dd0d.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 1041.394561] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a017600-8d07-419d-922d-e6c6627f0879 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1041.404016] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){
[ 1041.404016] env[61998]: value = "task-1388990"
[ 1041.404016] env[61998]: _type = "Task"
[ 1041.404016] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1041.412241] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388990, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1041.629770] env[61998]: DEBUG oslo_concurrency.lockutils [None req-adaff0eb-3114-4012-980a-9aeede007daa tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "a5785859-2a23-478d-b156-1817fbdcb313" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 18.896s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1041.704522] env[61998]: DEBUG nova.network.neutron [-] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1041.737882] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b9e77dc3-021f-468d-aeb6-0cee18f6e995 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1041.750624] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2909ad7a-ef55-4773-a056-35c823cc5c4e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1041.787434] env[61998]: DEBUG nova.compute.manager [req-77d24a60-6e5e-4e3b-8ec6-093668c63523 req-923e62e6-1b65-4f12-8f6f-6191cb52a59b service nova] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Detach interface failed, port_id=c12a8246-774e-4a6e-8ff1-29a553b5f0ee, reason: Instance 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}}
[ 1041.885328] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388989, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1041.913573] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388990, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498115} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1041.913841] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore2] dde42f82-2616-43f0-a709-d6a63a63dd0d/dde42f82-2616-43f0-a709-d6a63a63dd0d.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 1041.914113] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 1041.914413] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca58da06-fe27-4039-9626-d3662281b385 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1041.921315] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){
[ 1041.921315] env[61998]: value = "task-1388991"
[ 1041.921315] env[61998]: _type = "Task"
[ 1041.921315] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1041.929648] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388991, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1042.207504] env[61998]: INFO nova.compute.manager [-] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Took 1.23 seconds to deallocate network for instance.
[ 1042.385529] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388989, 'name': ReconfigVM_Task, 'duration_secs': 0.668177} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1042.385945] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 79a15d76-acc3-465d-9ab7-fa61a894affd/79a15d76-acc3-465d-9ab7-fa61a894affd.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1042.386648] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7acc4f4d-65f1-46e1-b414-b5c4c89b94c5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1042.393238] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){
[ 1042.393238] env[61998]: value = "task-1388992"
[ 1042.393238] env[61998]: _type = "Task"
[ 1042.393238] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1042.401280] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388992, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1042.432852] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388991, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077779} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
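Editor's note: both instances' root disks follow the same pattern above: clone the cached image VMDK into the instance directory, then grow the copy to the flavor's root size ("Extending root virtual disk to 1048576", i.e. 1048576 KB = 1 GiB). A sketch of that flow, with disk_mgr a hypothetical stand-in for the VirtualDiskManager calls (CopyVirtualDisk_Task, ExtendVirtualDisk_Task):

```python
KIB_PER_GIB = 1024 * 1024  # the extend call in the log takes the new size in KB


def build_root_disk(disk_mgr, image_cache_vmdk, instance_vmdk, flavor_root_gb):
    """Clone the cached image, then grow it to the flavor's root size.

    Cheaper than re-downloading from Glance for every boot: the copy is a
    datastore-local operation, and only the extend depends on the flavor.
    """
    disk_mgr.copy(image_cache_vmdk, instance_vmdk)   # CopyVirtualDisk_Task
    new_size_kb = flavor_root_gb * KIB_PER_GIB       # 1 GiB flavor -> 1048576 KB
    disk_mgr.extend(instance_vmdk, new_size_kb)      # ExtendVirtualDisk_Task
```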
[ 1042.433145] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 1042.433929] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90df4c77-384c-4e9a-831f-93d1b1d5f7d8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1042.456708] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] dde42f82-2616-43f0-a709-d6a63a63dd0d/dde42f82-2616-43f0-a709-d6a63a63dd0d.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1042.456960] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82241ad4-5a07-41ad-9e79-ca240b930f77 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1042.476207] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){
[ 1042.476207] env[61998]: value = "task-1388993"
[ 1042.476207] env[61998]: _type = "Task"
[ 1042.476207] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1042.483992] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388993, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1042.714797] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1042.715103] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1042.715337] env[61998]: DEBUG nova.objects.instance [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lazy-loading 'resources' on Instance uuid 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1042.903734] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388992, 'name': Rename_Task, 'duration_secs': 0.443954} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1042.904068] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1042.904285] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-673bec09-0b74-45e5-a672-b5d7db61434f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1042.911765] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){
[ 1042.911765] env[61998]: value = "task-1388994"
[ 1042.911765] env[61998]: _type = "Task"
[ 1042.911765] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1042.919971] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388994, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1042.988197] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388993, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1043.315923] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52228b6f-cf0a-4af3-acbe-2df6ebf5daff {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1043.325583] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4674bf6-52d5-47e5-930d-6f14d52813b1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1043.356435] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237d2105-471e-4979-a49e-a5ccd31a9445 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1043.364772] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae4872d-7b10-4404-b68f-ae1043d54b33 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1043.379929] env[61998]: DEBUG nova.compute.provider_tree [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1043.422988] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388994, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1043.487253] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388993, 'name': ReconfigVM_Task, 'duration_secs': 0.738246} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1043.487530] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Reconfigured VM instance instance-00000067 to attach disk [datastore2] dde42f82-2616-43f0-a709-d6a63a63dd0d/dde42f82-2616-43f0-a709-d6a63a63dd0d.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1043.488163] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7aee4b5d-2643-428b-86bb-274d7e3dcddd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1043.494961] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){
[ 1043.494961] env[61998]: value = "task-1388995"
[ 1043.494961] env[61998]: _type = "Task"
[ 1043.494961] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1043.506035] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388995, 'name': Rename_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1043.882863] env[61998]: DEBUG nova.scheduler.client.report [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 1043.923554] env[61998]: DEBUG oslo_vmware.api [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1388994, 'name': PowerOnVM_Task, 'duration_secs': 0.942655} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1043.923857] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1043.925041] env[61998]: INFO nova.compute.manager [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Took 9.40 seconds to spawn the instance on the hypervisor.
[ 1043.925041] env[61998]: DEBUG nova.compute.manager [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}}
[ 1043.925634] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffea3e0a-8dd6-40e2-ba38-ed10948282f4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1044.005141] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388995, 'name': Rename_Task, 'duration_secs': 0.353948} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
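Editor's note: the inventory dict reported to placement above fixes what the scheduler can allocate on this host. Placement's capacity formula per resource class is (total - reserved) * allocation_ratio, so the logged 48 VCPUs at ratio 4.0 yield 192 schedulable vCPUs, and 196590 MB of RAM minus the 512 MB reserve at ratio 1.0 yields 196078 MB. A small worked check of that arithmetic:

```python
def placement_capacity(inv):
    """Capacity placement derives from an inventory record:
    (total - reserved) * allocation_ratio."""
    return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]


# Values copied from the inventory line in the log above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    print(rc, placement_capacity(inv))  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```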
[ 1044.005470] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1044.005747] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1356714b-be76-47bd-a133-c9765a445669 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1044.013064] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){
[ 1044.013064] env[61998]: value = "task-1388996"
[ 1044.013064] env[61998]: _type = "Task"
[ 1044.013064] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1044.020941] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388996, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1044.387934] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.673s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1044.409911] env[61998]: INFO nova.scheduler.client.report [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Deleted allocations for instance 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b
[ 1044.447313] env[61998]: INFO nova.compute.manager [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Took 20.37 seconds to build instance.
[ 1044.523632] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388996, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1044.918140] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d822746a-5e2c-40ef-9cf4-620c31d46793 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.089s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1044.949871] env[61998]: DEBUG oslo_concurrency.lockutils [None req-05ef700b-1d46-4887-b773-7ea18fdaedff tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "79a15d76-acc3-465d-9ab7-fa61a894affd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 21.879s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1045.026800] env[61998]: DEBUG oslo_vmware.api [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1388996, 'name': PowerOnVM_Task, 'duration_secs': 0.909591} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1045.027059] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1045.027376] env[61998]: INFO nova.compute.manager [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Took 8.33 seconds to spawn the instance on the hypervisor.
[ 1045.027618] env[61998]: DEBUG nova.compute.manager [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}}
[ 1045.028564] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db7edd4-943b-4c50-8d01-56eeb1f861a5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1045.548614] env[61998]: DEBUG oslo_concurrency.lockutils [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1045.548943] env[61998]: DEBUG oslo_concurrency.lockutils [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1045.549188] env[61998]: DEBUG oslo_concurrency.lockutils [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "f87f913f-9e6e-4d64-9fe1-0a1fc8564b46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1045.549390] env[61998]: DEBUG oslo_concurrency.lockutils [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "f87f913f-9e6e-4d64-9fe1-0a1fc8564b46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1045.549560] env[61998]: DEBUG oslo_concurrency.lockutils [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "f87f913f-9e6e-4d64-9fe1-0a1fc8564b46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1045.551111] env[61998]: INFO nova.compute.manager [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Took 19.15 seconds to build instance.
[ 1045.552081] env[61998]: INFO nova.compute.manager [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Terminating instance
[ 1045.553653] env[61998]: DEBUG nova.compute.manager [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}}
[ 1045.553842] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1045.554685] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ed490b-4da7-4b7c-a5ee-7932882f83b1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1045.563430] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1045.563694] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5598a52-2615-4815-8d04-f6bf723c54b6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1045.571819] env[61998]: DEBUG oslo_vmware.api [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){
[ 1045.571819] env[61998]: value = "task-1388997"
[ 1045.571819] env[61998]: _type = "Task"
[ 1045.571819] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1045.580474] env[61998]: DEBUG oslo_vmware.api [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388997, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1045.982201] env[61998]: DEBUG nova.compute.manager [req-49c5b666-3be7-40c2-ad8e-566b24f0801c req-3d6f3a9b-9ccd-43cc-93c7-d0be2eb40f24 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Received event network-changed-723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 1045.982428] env[61998]: DEBUG nova.compute.manager [req-49c5b666-3be7-40c2-ad8e-566b24f0801c req-3d6f3a9b-9ccd-43cc-93c7-d0be2eb40f24 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Refreshing instance network info cache due to event network-changed-723ec6af-ec74-4c82-ae7c-4795b74d6aad. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}}
[ 1045.982779] env[61998]: DEBUG oslo_concurrency.lockutils [req-49c5b666-3be7-40c2-ad8e-566b24f0801c req-3d6f3a9b-9ccd-43cc-93c7-d0be2eb40f24 service nova] Acquiring lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1045.982779] env[61998]: DEBUG oslo_concurrency.lockutils [req-49c5b666-3be7-40c2-ad8e-566b24f0801c req-3d6f3a9b-9ccd-43cc-93c7-d0be2eb40f24 service nova] Acquired lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1045.982974] env[61998]: DEBUG nova.network.neutron [req-49c5b666-3be7-40c2-ad8e-566b24f0801c req-3d6f3a9b-9ccd-43cc-93c7-d0be2eb40f24 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Refreshing network info cache for port 723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1046.054057] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5bf15118-cc25-4284-85c3-00816251f8a9 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "dde42f82-2616-43f0-a709-d6a63a63dd0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 20.661s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1046.082571] env[61998]: DEBUG oslo_vmware.api [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388997, 'name': PowerOffVM_Task, 'duration_secs': 0.256306} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
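Editor's note: the req-* lines above are Neutron-originated external events. On a network-changed event the compute manager re-reads the port from Neutron and rewrites its cached view, serialized on the per-instance refresh_cache lock so concurrent refreshes don't race. A sketch of that path, with cache a hypothetical stand-in for the instance info cache (neutron.show_port is the standard neutronclient call):

```python
from oslo_concurrency import lockutils


def handle_network_changed(neutron, cache, instance_uuid, port_id):
    """Refresh one port's entry in the instance network info cache."""
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        port = neutron.show_port(port_id)["port"]  # fresh view of the port
        cache.update_vif(instance_uuid, port)      # "Updated VIF entry ..."
```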
[ 1046.082883] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1046.083122] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1046.083337] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf5cfe3c-9bd8-4edd-a2bb-6cd77515edb9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1046.152231] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1046.152567] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1046.152663] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Deleting the datastore file [datastore1] f87f913f-9e6e-4d64-9fe1-0a1fc8564b46 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1046.152953] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f32cf91-2245-4769-8644-ce7aef2ba677 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1046.160599] env[61998]: DEBUG oslo_vmware.api [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for the task: (returnval){
[ 1046.160599] env[61998]: value = "task-1388999"
[ 1046.160599] env[61998]: _type = "Task"
[ 1046.160599] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1046.168962] env[61998]: DEBUG oslo_vmware.api [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388999, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1046.364509] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9bc6223-e307-4893-afc8-f37cf2740efb tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "cd7775be-954c-4117-b9a9-763bbfb325c4" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1046.364871] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9bc6223-e307-4893-afc8-f37cf2740efb tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "cd7775be-954c-4117-b9a9-763bbfb325c4" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1046.365152] env[61998]: DEBUG nova.compute.manager [None req-b9bc6223-e307-4893-afc8-f37cf2740efb tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}}
[ 1046.366387] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc675643-4834-4d9a-b6fd-261930dc6274 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1046.373759] env[61998]: DEBUG nova.compute.manager [None req-b9bc6223-e307-4893-afc8-f37cf2740efb tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61998) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3378}}
[ 1046.374340] env[61998]: DEBUG nova.objects.instance [None req-b9bc6223-e307-4893-afc8-f37cf2740efb tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lazy-loading 'flavor' on Instance uuid cd7775be-954c-4117-b9a9-763bbfb325c4 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1046.672428] env[61998]: DEBUG oslo_vmware.api [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Task: {'id': task-1388999, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202028} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1046.672775] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1046.673011] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1046.673281] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1046.673492] env[61998]: INFO nova.compute.manager [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Took 1.12 seconds to destroy the instance on the hypervisor.
[ 1046.673807] env[61998]: DEBUG oslo.service.loopingcall [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1046.674089] env[61998]: DEBUG nova.compute.manager [-] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 1046.674215] env[61998]: DEBUG nova.network.neutron [-] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1046.730306] env[61998]: DEBUG nova.network.neutron [req-49c5b666-3be7-40c2-ad8e-566b24f0801c req-3d6f3a9b-9ccd-43cc-93c7-d0be2eb40f24 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updated VIF entry in instance network info cache for port 723ec6af-ec74-4c82-ae7c-4795b74d6aad.
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1046.730878] env[61998]: DEBUG nova.network.neutron [req-49c5b666-3be7-40c2-ad8e-566b24f0801c req-3d6f3a9b-9ccd-43cc-93c7-d0be2eb40f24 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updating instance_info_cache with network_info: [{"id": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "address": "fa:16:3e:23:8d:43", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723ec6af-ec", "ovs_interfaceid": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.904963] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "c16a959d-9c28-480b-aa62-51e7804ad0ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.905233] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "c16a959d-9c28-480b-aa62-51e7804ad0ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.197929] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "4c41a59a-59d4-4abd-b173-118e759fc19c" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.198364] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.198623] env[61998]: INFO nova.compute.manager [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Shelving [ 1047.233872] env[61998]: DEBUG oslo_concurrency.lockutils [req-49c5b666-3be7-40c2-ad8e-566b24f0801c req-3d6f3a9b-9ccd-43cc-93c7-d0be2eb40f24 service nova] Releasing lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.382398] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9bc6223-e307-4893-afc8-f37cf2740efb tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1047.382808] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e3b048f-d473-4288-9a5c-9aa1405ada28 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.393067] env[61998]: DEBUG oslo_vmware.api [None req-b9bc6223-e307-4893-afc8-f37cf2740efb tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1047.393067] env[61998]: value = "task-1389000" [ 1047.393067] env[61998]: _type = "Task" [ 1047.393067] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.402830] env[61998]: DEBUG oslo_vmware.api [None req-b9bc6223-e307-4893-afc8-f37cf2740efb tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389000, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.407401] env[61998]: DEBUG nova.compute.manager [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Starting instance... {{(pid=61998) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1047.443610] env[61998]: DEBUG nova.network.neutron [-] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.903274] env[61998]: DEBUG oslo_vmware.api [None req-b9bc6223-e307-4893-afc8-f37cf2740efb tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389000, 'name': PowerOffVM_Task, 'duration_secs': 0.285763} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.904124] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9bc6223-e307-4893-afc8-f37cf2740efb tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1047.904124] env[61998]: DEBUG nova.compute.manager [None req-b9bc6223-e307-4893-afc8-f37cf2740efb tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1047.904717] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb2248f-73f0-4998-b85c-7beff7136830 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.933404] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.933673] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.936143] env[61998]: INFO nova.compute.claims [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1047.946601] env[61998]: INFO nova.compute.manager [-] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Took 1.27 seconds to deallocate network for instance. 
[ 1048.006519] env[61998]: DEBUG nova.compute.manager [req-4963c467-092d-4a1c-ad63-9b3c25a8bf37 req-17910c6e-9044-4709-bac0-2dabb0cbfde2 service nova] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Received event network-vif-deleted-dcbfafa6-90f8-46a7-bcc9-c1a0ccb112ba {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1048.208858] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.209316] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a078451-125a-4c23-8b6e-e7f35ebd2045 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.220821] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1048.220821] env[61998]: value = "task-1389001" [ 1048.220821] env[61998]: _type = "Task" [ 1048.220821] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.230963] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389001, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.420809] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b9bc6223-e307-4893-afc8-f37cf2740efb tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "cd7775be-954c-4117-b9a9-763bbfb325c4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.056s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.454018] env[61998]: DEBUG oslo_concurrency.lockutils [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.731829] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389001, 'name': PowerOffVM_Task, 'duration_secs': 0.313473} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.732116] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1048.732920] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff75a115-6ac1-4d89-b3ec-6b9d848d3e8e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.751408] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94639ca-b262-4a22-a8a9-50a4b648bf7c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.883198] env[61998]: DEBUG nova.objects.instance [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lazy-loading 'flavor' on Instance uuid cd7775be-954c-4117-b9a9-763bbfb325c4 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.038712] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f83954a-f77b-42aa-980e-17655d8c2038 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.046792] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c9e34c-322a-44cb-bf8d-34580d687443 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.077507] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c9219a-8e17-400b-9ee2-a376f1116481 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.085631] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92867ae5-d9de-4ea6-8716-7cf8683640e2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.099491] env[61998]: DEBUG nova.compute.provider_tree [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.262275] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Creating Snapshot of the VM instance {{(pid=61998) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1049.262664] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-50be675a-68d5-49a8-87f4-ca627a252e56 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.272408] env[61998]: DEBUG oslo_vmware.api 
[None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1049.272408] env[61998]: value = "task-1389002" [ 1049.272408] env[61998]: _type = "Task" [ 1049.272408] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.281196] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389002, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.388403] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "refresh_cache-cd7775be-954c-4117-b9a9-763bbfb325c4" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.388630] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquired lock "refresh_cache-cd7775be-954c-4117-b9a9-763bbfb325c4" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.388819] env[61998]: DEBUG nova.network.neutron [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1049.389018] env[61998]: DEBUG nova.objects.instance [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lazy-loading 'info_cache' on Instance uuid cd7775be-954c-4117-b9a9-763bbfb325c4 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.603120] env[61998]: DEBUG nova.scheduler.client.report [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1049.783406] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389002, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.893273] env[61998]: DEBUG nova.objects.base [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1050.108049] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.174s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.108678] env[61998]: DEBUG nova.compute.manager [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Start building networks asynchronously for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1050.114033] env[61998]: DEBUG oslo_concurrency.lockutils [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.660s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.114033] env[61998]: DEBUG nova.objects.instance [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lazy-loading 'resources' on Instance uuid f87f913f-9e6e-4d64-9fe1-0a1fc8564b46 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.284760] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389002, 'name': CreateSnapshot_Task, 'duration_secs': 0.874554} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.285135] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Created Snapshot of the VM instance {{(pid=61998) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1050.285889] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e58d2e-5e26-401a-b188-930526eb621d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.601191] env[61998]: DEBUG nova.network.neutron [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Updating instance_info_cache with network_info: [{"id": "4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5", "address": "fa:16:3e:13:d7:e5", "network": {"id": "0465a764-ad68-4418-8f04-995d523674e9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1211841276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7647715795e34176aebe4087ee3a3b42", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ac1ddd1-57", "ovs_interfaceid": "4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.616530] env[61998]: DEBUG nova.compute.utils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Using /dev/sd instead of None {{(pid=61998) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1050.617893] env[61998]: DEBUG nova.compute.manager [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Allocating IP information in the background. 
{{(pid=61998) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1050.618097] env[61998]: DEBUG nova.network.neutron [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1050.662387] env[61998]: DEBUG nova.policy [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3066202e35b643d1b6d3f2d8b4d724ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e479b6ac56f464fbc86574f776cd96c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 1050.728102] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5930570a-cfdf-4679-bc9b-3891cf0bb2b2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.736604] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ca8c74-8857-40ec-941e-2509514d2c5e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.768321] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75eeee73-077e-4555-8e10-44423e48223d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.776755] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cd2d48-364b-48f8-9443-b02b9086b091 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.793197] env[61998]: DEBUG nova.compute.provider_tree [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.803142] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Creating linked-clone VM from snapshot {{(pid=61998) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1050.804077] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-49cdce9d-0328-4f17-899a-4ba8459e60a9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.813766] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: 
(returnval){ [ 1050.813766] env[61998]: value = "task-1389003" [ 1050.813766] env[61998]: _type = "Task" [ 1050.813766] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.823798] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389003, 'name': CloneVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.941354] env[61998]: DEBUG nova.network.neutron [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Successfully created port: 0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1051.103980] env[61998]: DEBUG oslo_concurrency.lockutils [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Releasing lock "refresh_cache-cd7775be-954c-4117-b9a9-763bbfb325c4" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.123753] env[61998]: DEBUG nova.compute.manager [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Start building block device mappings for instance. {{(pid=61998) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1051.296547] env[61998]: DEBUG nova.scheduler.client.report [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1051.324875] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389003, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.802646] env[61998]: DEBUG oslo_concurrency.lockutils [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.689s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.825239] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389003, 'name': CloneVM_Task} progress is 95%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.827130] env[61998]: INFO nova.scheduler.client.report [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Deleted allocations for instance f87f913f-9e6e-4d64-9fe1-0a1fc8564b46 [ 1052.113840] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.114198] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fdf0a2a2-f11e-4479-8dc4-09968a520946 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.122630] env[61998]: DEBUG oslo_vmware.api [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1052.122630] env[61998]: value = "task-1389004" [ 1052.122630] env[61998]: _type = "Task" [ 1052.122630] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.131090] env[61998]: DEBUG oslo_vmware.api [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389004, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.134653] env[61998]: DEBUG nova.compute.manager [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Start spawning the instance on the hypervisor. 
{{(pid=61998) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1052.161427] env[61998]: DEBUG nova.virt.hardware [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-31T11:47:43Z,direct_url=,disk_format='vmdk',id=a90c4a31-8bcc-48cf-ada7-7369ab14c460,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='963dfe7ac463431ca9e784c01b2f3013',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-31T11:47:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1052.161701] env[61998]: DEBUG nova.virt.hardware [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1052.161867] env[61998]: DEBUG nova.virt.hardware [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1052.162074] env[61998]: DEBUG nova.virt.hardware [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1052.162235] env[61998]: DEBUG nova.virt.hardware [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1052.162458] env[61998]: DEBUG nova.virt.hardware [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1052.162735] env[61998]: DEBUG nova.virt.hardware [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1052.162957] env[61998]: DEBUG nova.virt.hardware [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1052.163183] 
env[61998]: DEBUG nova.virt.hardware [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1052.163369] env[61998]: DEBUG nova.virt.hardware [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1052.163550] env[61998]: DEBUG nova.virt.hardware [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1052.164439] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46d7d8b-6de0-45a9-8161-a35fbc51863d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.172580] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e83026-5a3a-4104-a68c-32b2ab855014 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.325730] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389003, 'name': CloneVM_Task, 'duration_secs': 1.201573} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.326368] env[61998]: INFO nova.virt.vmwareapi.vmops [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Created linked-clone VM from snapshot [ 1052.326774] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae188d92-ae55-413d-b04b-d1cc56cf8cb4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.337928] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Uploading image 33dfbf2a-e277-437f-b9d9-945de9717988 {{(pid=61998) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1052.340264] env[61998]: DEBUG oslo_concurrency.lockutils [None req-96f71767-2347-46b7-a03e-3ae38190d751 tempest-ServerRescueTestJSON-1259700960 tempest-ServerRescueTestJSON-1259700960-project-member] Lock "f87f913f-9e6e-4d64-9fe1-0a1fc8564b46" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.791s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.357843] env[61998]: DEBUG nova.compute.manager [req-132b8f3f-712a-4e9c-a730-8bcecf6af4dc req-b5b70716-ac9c-429d-a283-bf04957be640 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Received event network-vif-plugged-0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1052.358164] env[61998]: DEBUG oslo_concurrency.lockutils [req-132b8f3f-712a-4e9c-a730-8bcecf6af4dc req-b5b70716-ac9c-429d-a283-bf04957be640 service nova] Acquiring lock "c16a959d-9c28-480b-aa62-51e7804ad0ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.358488] env[61998]: DEBUG oslo_concurrency.lockutils [req-132b8f3f-712a-4e9c-a730-8bcecf6af4dc req-b5b70716-ac9c-429d-a283-bf04957be640 service nova] Lock "c16a959d-9c28-480b-aa62-51e7804ad0ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.358576] env[61998]: DEBUG oslo_concurrency.lockutils [req-132b8f3f-712a-4e9c-a730-8bcecf6af4dc req-b5b70716-ac9c-429d-a283-bf04957be640 service nova] Lock "c16a959d-9c28-480b-aa62-51e7804ad0ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.358834] env[61998]: DEBUG nova.compute.manager [req-132b8f3f-712a-4e9c-a730-8bcecf6af4dc req-b5b70716-ac9c-429d-a283-bf04957be640 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] No waiting events found dispatching network-vif-plugged-0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1052.359039] env[61998]: WARNING nova.compute.manager 
[req-132b8f3f-712a-4e9c-a730-8bcecf6af4dc req-b5b70716-ac9c-429d-a283-bf04957be640 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Received unexpected event network-vif-plugged-0891a509-232a-48e5-be4b-73a585033317 for instance with vm_state building and task_state spawning. [ 1052.378626] env[61998]: DEBUG oslo_vmware.rw_handles [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1052.378626] env[61998]: value = "vm-294795" [ 1052.378626] env[61998]: _type = "VirtualMachine" [ 1052.378626] env[61998]: }. {{(pid=61998) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1052.379345] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f0ca720d-bcbe-4b0c-907f-c118273f5a5c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.387909] env[61998]: DEBUG oslo_vmware.rw_handles [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lease: (returnval){ [ 1052.387909] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]529a401d-cf23-938f-2679-bc166f08cc3a" [ 1052.387909] env[61998]: _type = "HttpNfcLease" [ 1052.387909] env[61998]: } obtained for exporting VM: (result){ [ 1052.387909] env[61998]: value = "vm-294795" [ 1052.387909] env[61998]: _type = "VirtualMachine" [ 1052.387909] env[61998]: }. {{(pid=61998) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1052.388227] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the lease: (returnval){ [ 1052.388227] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]529a401d-cf23-938f-2679-bc166f08cc3a" [ 1052.388227] env[61998]: _type = "HttpNfcLease" [ 1052.388227] env[61998]: } to be ready. {{(pid=61998) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1052.395602] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1052.395602] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]529a401d-cf23-938f-2679-bc166f08cc3a" [ 1052.395602] env[61998]: _type = "HttpNfcLease" [ 1052.395602] env[61998]: } is initializing. {{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1052.497784] env[61998]: DEBUG nova.network.neutron [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Successfully updated port: 0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1052.633633] env[61998]: DEBUG oslo_vmware.api [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389004, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.896803] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1052.896803] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]529a401d-cf23-938f-2679-bc166f08cc3a" [ 1052.896803] env[61998]: _type = "HttpNfcLease" [ 1052.896803] env[61998]: } is ready. {{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1052.897130] env[61998]: DEBUG oslo_vmware.rw_handles [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1052.897130] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]529a401d-cf23-938f-2679-bc166f08cc3a" [ 1052.897130] env[61998]: _type = "HttpNfcLease" [ 1052.897130] env[61998]: }. {{(pid=61998) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1052.897874] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d1b36a-322f-4acc-aaf2-ae6305890bd3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.906566] env[61998]: DEBUG oslo_vmware.rw_handles [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5272c9bd-117b-3e2a-b03a-181c8af1973f/disk-0.vmdk from lease info. {{(pid=61998) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1052.906758] env[61998]: DEBUG oslo_vmware.rw_handles [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5272c9bd-117b-3e2a-b03a-181c8af1973f/disk-0.vmdk for reading. 
{{(pid=61998) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1053.000804] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.000953] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.001128] env[61998]: DEBUG nova.network.neutron [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1053.018941] env[61998]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e92f373b-da52-4d37-a2f9-8f40375d2b4d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.134606] env[61998]: DEBUG oslo_vmware.api [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389004, 'name': PowerOnVM_Task, 'duration_secs': 0.573406} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.134891] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.135113] env[61998]: DEBUG nova.compute.manager [None req-eadc8801-ad33-4542-a161-96627011244f tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1053.136057] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76db4185-9828-44a5-a79f-101545823b6b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.573299] env[61998]: DEBUG nova.network.neutron [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Instance cache missing network info. 
{{(pid=61998) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1053.848715] env[61998]: DEBUG nova.network.neutron [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updating instance_info_cache with network_info: [{"id": "0891a509-232a-48e5-be4b-73a585033317", "address": "fa:16:3e:9b:86:5e", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0891a509-23", "ovs_interfaceid": "0891a509-232a-48e5-be4b-73a585033317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.355690] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1054.356220] env[61998]: DEBUG nova.compute.manager [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Instance network_info: |[{"id": "0891a509-232a-48e5-be4b-73a585033317", "address": "fa:16:3e:9b:86:5e", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0891a509-23", "ovs_interfaceid": "0891a509-232a-48e5-be4b-73a585033317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61998) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2002}} [ 1054.356761] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:86:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92f3cfd6-c130-4390-8910-865fbc42afd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0891a509-232a-48e5-be4b-73a585033317', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1054.365655] env[61998]: DEBUG oslo.service.loopingcall [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1054.366188] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1054.366601] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf147250-69e9-4c44-a6d3-c674672544ee {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.390876] env[61998]: DEBUG nova.compute.manager [req-f23d5b41-b9fe-407b-9584-1432a84afd97 req-a442c527-be10-4a74-a4bc-51d39aeefbbc service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Received event network-changed-0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1054.391335] env[61998]: DEBUG nova.compute.manager [req-f23d5b41-b9fe-407b-9584-1432a84afd97 req-a442c527-be10-4a74-a4bc-51d39aeefbbc service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Refreshing instance network info cache due to event network-changed-0891a509-232a-48e5-be4b-73a585033317. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1054.392034] env[61998]: DEBUG oslo_concurrency.lockutils [req-f23d5b41-b9fe-407b-9584-1432a84afd97 req-a442c527-be10-4a74-a4bc-51d39aeefbbc service nova] Acquiring lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.392034] env[61998]: DEBUG oslo_concurrency.lockutils [req-f23d5b41-b9fe-407b-9584-1432a84afd97 req-a442c527-be10-4a74-a4bc-51d39aeefbbc service nova] Acquired lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.392392] env[61998]: DEBUG nova.network.neutron [req-f23d5b41-b9fe-407b-9584-1432a84afd97 req-a442c527-be10-4a74-a4bc-51d39aeefbbc service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Refreshing network info cache for port 0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1054.401855] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1054.401855] env[61998]: value = "task-1389006" [ 1054.401855] env[61998]: _type = "Task" [ 1054.401855] env[61998]: } to complete. 
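The "Instance VIF info" entry above is the distilled form the driver hands to the VM builder: the Neutron port is reduced to a MAC, an NSX opaque-network reference, and a NIC model. The values below are copied verbatim from that log entry, restated as the Python structure it prints.

vif_info = {
    'network_name': 'br-int',
    'mac_address': 'fa:16:3e:9b:86:5e',
    'network_ref': {
        'type': 'OpaqueNetwork',
        'network-id': '92f3cfd6-c130-4390-8910-865fbc42afd1',
        'network-type': 'nsx.LogicalSwitch',
        'use-external-id': True,
    },
    'iface_id': '0891a509-232a-48e5-be4b-73a585033317',
    'vif_model': 'vmxnet3',  # paravirtual NIC presented to the guest
}
assert vif_info['network_ref']['type'] == 'OpaqueNetwork'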
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.414211] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389006, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.913267] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389006, 'name': CreateVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.276103] env[61998]: DEBUG nova.network.neutron [req-f23d5b41-b9fe-407b-9584-1432a84afd97 req-a442c527-be10-4a74-a4bc-51d39aeefbbc service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updated VIF entry in instance network info cache for port 0891a509-232a-48e5-be4b-73a585033317. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1055.276565] env[61998]: DEBUG nova.network.neutron [req-f23d5b41-b9fe-407b-9584-1432a84afd97 req-a442c527-be10-4a74-a4bc-51d39aeefbbc service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updating instance_info_cache with network_info: [{"id": "0891a509-232a-48e5-be4b-73a585033317", "address": "fa:16:3e:9b:86:5e", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0891a509-23", "ovs_interfaceid": "0891a509-232a-48e5-be4b-73a585033317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.414308] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389006, 'name': CreateVM_Task, 'duration_secs': 0.517773} completed successfully. 
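The _poll_task lines above (progress 0% → 99% → "completed successfully") come from oslo.vmware's task waiter. The loop below is a simplified re-implementation of that shape only, not the library's actual _poll_task (which runs inside a looping call and raises typed exceptions); get_task_info stands in for one PropertyCollector round-trip.

import time

def wait_for_task(get_task_info, poll_interval=0.5):
    # Poll until the task leaves the queued/running states, reporting
    # progress each round, as the log entries above show.
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError('task %s failed' % info['id'])
        print("Task %s progress is %s%%." % (info['id'], info.get('progress', 0)))
        time.sleep(poll_interval)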
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.414528] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1055.415326] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1055.415533] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.415895] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1055.416546] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6bf1ebf-7ac2-40d7-8588-8a2368c0d2d5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.422288] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1055.422288] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]522b92cb-5276-5e8e-8589-09eeb43fd087" [ 1055.422288] env[61998]: _type = "Task" [ 1055.422288] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.432207] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]522b92cb-5276-5e8e-8589-09eeb43fd087, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.778909] env[61998]: DEBUG oslo_concurrency.lockutils [req-f23d5b41-b9fe-407b-9584-1432a84afd97 req-a442c527-be10-4a74-a4bc-51d39aeefbbc service nova] Releasing lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.933049] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]522b92cb-5276-5e8e-8589-09eeb43fd087, 'name': SearchDatastore_Task, 'duration_secs': 0.010762} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.933377] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.933600] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Processing image a90c4a31-8bcc-48cf-ada7-7369ab14c460 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1055.933940] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1055.934126] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.934317] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1055.934578] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c55b99b-9771-48a4-ad6b-06590cff4aff {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.943414] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1055.943597] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Folder [datastore1] devstack-image-cache_base created. 
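The mkdir/"Folder … created" sequence above is deliberately idempotent: the image-cache directory is created unconditionally and an already-existing folder is tolerated, so the step is safe to repeat across concurrent spawns. A sketch of that shape, assuming a vCenter-style already-exists fault; the exception class and helper names here are stand-ins, not oslo.vmware's.

class FileAlreadyExistsFault(Exception):
    """Stand-in for the vCenter fault raised when the folder exists."""

def create_folder_if_missing(mkdir, path):
    try:
        mkdir(path)  # e.g. '[datastore1] devstack-image-cache_base'
    except FileAlreadyExistsFault:
        # Another spawn won the race; the folder exists, which is fine.
        pass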
{{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1055.944397] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c7217c3-0307-4f3b-8b1d-57dd16dead31 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.950718] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1055.950718] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52c56ba9-f363-a478-d1d5-a4c4a2e96341" [ 1055.950718] env[61998]: _type = "Task" [ 1055.950718] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.959961] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c56ba9-f363-a478-d1d5-a4c4a2e96341, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.186452] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "79a15d76-acc3-465d-9ab7-fa61a894affd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.186835] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "79a15d76-acc3-465d-9ab7-fa61a894affd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.187121] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "79a15d76-acc3-465d-9ab7-fa61a894affd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.187320] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "79a15d76-acc3-465d-9ab7-fa61a894affd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.187499] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "79a15d76-acc3-465d-9ab7-fa61a894affd-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.190052] env[61998]: INFO nova.compute.manager [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Terminating instance [ 1056.191975] env[61998]: DEBUG nova.compute.manager [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 1056.192194] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.193046] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44120dbf-8805-4600-9751-05e324a6fd1f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.201877] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.202152] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06c05454-b553-4f10-bec2-3e341bd5995b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.210553] env[61998]: DEBUG oslo_vmware.api [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1056.210553] env[61998]: value = "task-1389007" [ 1056.210553] env[61998]: _type = "Task" [ 1056.210553] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.220277] env[61998]: DEBUG oslo_vmware.api [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389007, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.462263] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52c56ba9-f363-a478-d1d5-a4c4a2e96341, 'name': SearchDatastore_Task, 'duration_secs': 0.010318} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.463058] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-688e0e6d-108c-4d95-96a1-51e5b4dbf53b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.468585] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1056.468585] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52fec1c6-4a20-8c89-06f6-b02dd40f674d" [ 1056.468585] env[61998]: _type = "Task" [ 1056.468585] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.476017] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52fec1c6-4a20-8c89-06f6-b02dd40f674d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.721554] env[61998]: DEBUG oslo_vmware.api [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389007, 'name': PowerOffVM_Task, 'duration_secs': 0.437021} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.721886] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.721995] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1056.722265] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca5eeb4a-6352-4f0f-bbe4-8f687fe8024c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.842204] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1056.842573] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1056.842879] env[61998]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Deleting the datastore file [datastore2] 79a15d76-acc3-465d-9ab7-fa61a894affd {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1056.843283] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23a6cae2-e396-4cba-9b46-2f8072381ada {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.851769] env[61998]: DEBUG oslo_vmware.api [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1056.851769] env[61998]: value = "task-1389009" [ 1056.851769] env[61998]: _type = "Task" [ 1056.851769] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.861472] env[61998]: DEBUG oslo_vmware.api [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389009, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.981027] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52fec1c6-4a20-8c89-06f6-b02dd40f674d, 'name': SearchDatastore_Task, 'duration_secs': 0.027077} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.981027] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.981027] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] c16a959d-9c28-480b-aa62-51e7804ad0ed/c16a959d-9c28-480b-aa62-51e7804ad0ed.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1056.981702] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4cbe6319-2bdb-4671-87f4-3ea989d618a1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.990923] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1056.990923] env[61998]: value = "task-1389010" [ 1056.990923] env[61998]: _type = "Task" [ 1056.990923] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.999405] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389010, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.362829] env[61998]: DEBUG oslo_vmware.api [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389009, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179159} completed successfully. 
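The CopyVirtualDisk_Task above (task-1389010) copies the cached base image to a per-instance vmdk. The path construction is plain string layout; the two datastore paths below reproduce exactly the source and destination logged, with illustrative helper names.

def cached_image_path(datastore, image_id):
    return '[%s] devstack-image-cache_base/%s/%s.vmdk' % (
        datastore, image_id, image_id)

def instance_disk_path(datastore, instance_uuid):
    return '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)

src = cached_image_path('datastore1', 'a90c4a31-8bcc-48cf-ada7-7369ab14c460')
dst = instance_disk_path('datastore1', 'c16a959d-9c28-480b-aa62-51e7804ad0ed')
# VirtualDiskManager.CopyVirtualDisk_Task is then invoked with src -> dst.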
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.363027] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.363468] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.363680] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.363865] env[61998]: INFO nova.compute.manager [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1057.364144] env[61998]: DEBUG oslo.service.loopingcall [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1057.364360] env[61998]: DEBUG nova.compute.manager [-] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1057.364458] env[61998]: DEBUG nova.network.neutron [-] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1057.504585] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389010, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.622101] env[61998]: DEBUG nova.compute.manager [req-5de75d1c-61ea-43fe-b521-18c4992e3f1f req-c9b7c787-d4c3-4e2e-b3d4-7175039680bb service nova] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Received event network-vif-deleted-cb32d9ba-0c75-427c-a8fa-77deb54fcb76 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1057.622186] env[61998]: INFO nova.compute.manager [req-5de75d1c-61ea-43fe-b521-18c4992e3f1f req-c9b7c787-d4c3-4e2e-b3d4-7175039680bb service nova] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Neutron deleted interface cb32d9ba-0c75-427c-a8fa-77deb54fcb76; detaching it from the instance and deleting it from the info cache [ 1057.622375] env[61998]: DEBUG nova.network.neutron [req-5de75d1c-61ea-43fe-b521-18c4992e3f1f req-c9b7c787-d4c3-4e2e-b3d4-7175039680bb service nova] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.004373] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389010, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.096095] env[61998]: DEBUG nova.network.neutron [-] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.125264] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc52882a-ea55-4958-a2a2-955adc7ad2ac {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.135712] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db3db32-1c7b-4f87-b2c6-672f08fcc197 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.166170] env[61998]: DEBUG nova.compute.manager [req-5de75d1c-61ea-43fe-b521-18c4992e3f1f req-c9b7c787-d4c3-4e2e-b3d4-7175039680bb service nova] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Detach interface failed, port_id=cb32d9ba-0c75-427c-a8fa-77deb54fcb76, reason: Instance 79a15d76-acc3-465d-9ab7-fa61a894affd could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 1058.503983] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389010, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.599034] env[61998]: INFO nova.compute.manager [-] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Took 1.23 seconds to deallocate network for instance. 
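The "Waiting for function … _deallocate_network_with_retries to return" entries come from oslo.service's looping-call machinery. Below is a minimal, generic example of that API (FixedIntervalLoopingCall plus LoopingCallDone), not Nova's actual retry wrapper, which adds backoff around the Neutron deallocation.

from oslo_service import loopingcall

attempts = {'n': 0}

def _deallocate_once():
    attempts['n'] += 1
    if attempts['n'] >= 3:  # pretend the third attempt succeeds
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_once)
result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone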
[ 1059.004738] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389010, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.542795} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.005207] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a90c4a31-8bcc-48cf-ada7-7369ab14c460/a90c4a31-8bcc-48cf-ada7-7369ab14c460.vmdk to [datastore1] c16a959d-9c28-480b-aa62-51e7804ad0ed/c16a959d-9c28-480b-aa62-51e7804ad0ed.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1059.005316] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Extending root virtual disk to 1048576 {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1059.006083] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92524a44-3b36-40f9-82bb-7a4930057192 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.015568] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1059.015568] env[61998]: value = "task-1389011" [ 1059.015568] env[61998]: _type = "Task" [ 1059.015568] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.025420] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389011, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.106031] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.106031] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.106031] env[61998]: DEBUG nova.objects.instance [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lazy-loading 'resources' on Instance uuid 79a15d76-acc3-465d-9ab7-fa61a894affd {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1059.526818] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389011, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.703484] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eba9abd-127c-482f-b63b-db6e21478214 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.713046] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a26bf79-e6fa-4053-a75f-7d8f44c70e6c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.745222] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e14010-2972-4307-b336-8b2c0f3dcf12 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.754065] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d321e61-1448-4d16-a20c-eaa0a8096f54 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.768535] env[61998]: DEBUG nova.compute.provider_tree [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.026992] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389011, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.272317] env[61998]: DEBUG nova.scheduler.client.report [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1060.526819] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389011, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.777143] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.797181] env[61998]: INFO nova.scheduler.client.report [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Deleted allocations for instance 79a15d76-acc3-465d-9ab7-fa61a894affd [ 1061.028615] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389011, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.305399] env[61998]: DEBUG oslo_concurrency.lockutils [None req-c2461a31-3ad8-4bb5-977e-e31ac1a21d11 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "79a15d76-acc3-465d-9ab7-fa61a894affd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.118s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.528716] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389011, 'name': ExtendVirtualDisk_Task} progress is 0%. 
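The inventory blob above is what the resource tracker reports to Placement. Schedulable capacity per resource class is (total - reserved) * allocation_ratio, so the 4.0 VCPU ratio advertises 192 schedulable vCPUs from 48 physical ones. A worked check using the exact figures from the log:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0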
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.706414] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "a5785859-2a23-478d-b156-1817fbdcb313" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.706559] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "a5785859-2a23-478d-b156-1817fbdcb313" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.706773] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "a5785859-2a23-478d-b156-1817fbdcb313-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.706981] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "a5785859-2a23-478d-b156-1817fbdcb313-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.707180] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "a5785859-2a23-478d-b156-1817fbdcb313-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.709474] env[61998]: INFO nova.compute.manager [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Terminating instance [ 1061.711331] env[61998]: DEBUG nova.compute.manager [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Start destroying the instance on the hypervisor. 
{{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 1061.711526] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1061.712367] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5b62d1-20bf-49bc-8992-157c9c29a7c6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.721355] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1061.722407] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2386d95c-7923-4a20-ac1d-04cad0fae90e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.729846] env[61998]: DEBUG oslo_vmware.api [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1061.729846] env[61998]: value = "task-1389012" [ 1061.729846] env[61998]: _type = "Task" [ 1061.729846] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.739110] env[61998]: DEBUG oslo_vmware.api [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389012, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.030791] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389011, 'name': ExtendVirtualDisk_Task, 'duration_secs': 2.859366} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.030791] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Extended root virtual disk {{(pid=61998) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1062.031453] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f7bec7-eb6d-4ed0-8c34-c945a13eda75 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.054109] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] c16a959d-9c28-480b-aa62-51e7804ad0ed/c16a959d-9c28-480b-aa62-51e7804ad0ed.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1062.054495] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3827056-f0d9-42d5-8e2b-629c9bfe79d1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.074709] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1062.074709] env[61998]: value = "task-1389013" [ 1062.074709] env[61998]: _type = "Task" [ 1062.074709] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.082790] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389013, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.240730] env[61998]: DEBUG oslo_vmware.api [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389012, 'name': PowerOffVM_Task, 'duration_secs': 0.205178} completed successfully. 
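Read end to end, the entries for instance c16a959d-… trace the disk-handling tail of a VMware spawn. A compact restatement of that sequence, with the task ids as they appear in this log:

SPAWN_TAIL = [
    'CreateVM_Task',           # shell VM created (task-1389006)
    'SearchDatastore_Task',    # locate the cached base image
    'MakeDirectory',           # ensure devstack-image-cache_base exists
    'CopyVirtualDisk_Task',    # cache -> per-instance vmdk (task-1389010)
    'ExtendVirtualDisk_Task',  # grow the root disk (task-1389011)
    'ReconfigVM_Task',         # attach the vmdk, type sparse (task-1389013)
    'Rename_Task',             # give the VM its final name (task-1389016)
    'PowerOnVM_Task',          # boot (task-1389018)
]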
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.241033] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1062.241236] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1062.241475] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aaf96059-69bd-4d05-ae60-2ad915a5fdd1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.311432] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1062.311739] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1062.311974] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Deleting the datastore file [datastore2] a5785859-2a23-478d-b156-1817fbdcb313 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1062.312306] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bbfbac39-2b60-46aa-b660-3308a7d13e9d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.320416] env[61998]: DEBUG oslo_vmware.api [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1062.320416] env[61998]: value = "task-1389015" [ 1062.320416] env[61998]: _type = "Task" [ 1062.320416] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.329938] env[61998]: DEBUG oslo_vmware.api [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389015, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.585508] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389013, 'name': ReconfigVM_Task, 'duration_secs': 0.344836} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.586303] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Reconfigured VM instance instance-00000068 to attach disk [datastore1] c16a959d-9c28-480b-aa62-51e7804ad0ed/c16a959d-9c28-480b-aa62-51e7804ad0ed.vmdk or device None with type sparse {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1062.587230] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f54fe34c-c46b-4cb5-a89a-b5fdd38e970f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.595259] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1062.595259] env[61998]: value = "task-1389016" [ 1062.595259] env[61998]: _type = "Task" [ 1062.595259] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.604729] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389016, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.612028] env[61998]: DEBUG oslo_vmware.rw_handles [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5272c9bd-117b-3e2a-b03a-181c8af1973f/disk-0.vmdk. {{(pid=61998) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1062.612850] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c0cfc0-4cff-49ea-a37f-befe29a3db72 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.618449] env[61998]: DEBUG oslo_vmware.rw_handles [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5272c9bd-117b-3e2a-b03a-181c8af1973f/disk-0.vmdk is in state: ready. 
{{(pid=61998) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1062.618617] env[61998]: ERROR oslo_vmware.rw_handles [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5272c9bd-117b-3e2a-b03a-181c8af1973f/disk-0.vmdk due to incomplete transfer. [ 1062.618842] env[61998]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8cbe9c91-2a65-4494-9c66-d468ce3e9dc9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.627172] env[61998]: DEBUG oslo_vmware.rw_handles [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5272c9bd-117b-3e2a-b03a-181c8af1973f/disk-0.vmdk. {{(pid=61998) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1062.627381] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Uploaded image 33dfbf2a-e277-437f-b9d9-945de9717988 to the Glance image server {{(pid=61998) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1062.629584] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Destroying the VM {{(pid=61998) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1062.629829] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-396cf75f-3dcc-428b-84c8-4418bf7ce32d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.636325] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1062.636325] env[61998]: value = "task-1389017" [ 1062.636325] env[61998]: _type = "Task" [ 1062.636325] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.643884] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389017, 'name': Destroy_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.830328] env[61998]: DEBUG oslo_vmware.api [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389015, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174194} completed successfully. 
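The lease teardown above shows the unhappy path of an NFC export: the read handle for disk-0.vmdk is closed while the lease is still "ready", so the lease is aborted rather than completed before the upload to Glance is finalized. A sketch of that decision, assuming the lease-state/complete/abort operations are injected; the callables are illustrative stand-ins for the oslo.vmware session invocations logged above.

def release_read_lease(get_lease_state, complete, abort, transfer_complete):
    # Only a lease still in the "ready" state is acted on, per the
    # "Lease ... is in state: ready" entry above.
    if get_lease_state() != 'ready':
        return
    if transfer_complete:
        complete()  # HttpNfcLeaseComplete
    else:
        abort()     # HttpNfcLeaseAbort: "Aborting lease ... incomplete transfer."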
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.830630] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1062.830825] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1062.831016] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1062.831201] env[61998]: INFO nova.compute.manager [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1062.831449] env[61998]: DEBUG oslo.service.loopingcall [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1062.831678] env[61998]: DEBUG nova.compute.manager [-] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1062.831813] env[61998]: DEBUG nova.network.neutron [-] [instance: a5785859-2a23-478d-b156-1817fbdcb313] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1063.067946] env[61998]: DEBUG nova.compute.manager [req-5e03efc8-f72c-4f1e-9f57-e410f44142cb req-9dfaaabb-0d56-4ed4-b756-913b96730210 service nova] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Received event network-vif-deleted-7fd5361b-02d8-4989-956f-b685ccb4431b {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1063.068270] env[61998]: INFO nova.compute.manager [req-5e03efc8-f72c-4f1e-9f57-e410f44142cb req-9dfaaabb-0d56-4ed4-b756-913b96730210 service nova] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Neutron deleted interface 7fd5361b-02d8-4989-956f-b685ccb4431b; detaching it from the instance and deleting it from the info cache [ 1063.068463] env[61998]: DEBUG nova.network.neutron [req-5e03efc8-f72c-4f1e-9f57-e410f44142cb req-9dfaaabb-0d56-4ed4-b756-913b96730210 service nova] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.105409] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389016, 'name': Rename_Task, 'duration_secs': 0.139944} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.105646] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1063.105891] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d2ee6b6-99c3-4f3b-9542-adc8d654467d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.113355] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1063.113355] env[61998]: value = "task-1389018" [ 1063.113355] env[61998]: _type = "Task" [ 1063.113355] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.121131] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389018, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.146039] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389017, 'name': Destroy_Task, 'duration_secs': 0.329103} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.146411] env[61998]: INFO nova.virt.vmwareapi.vm_util [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Destroyed the VM [ 1063.146754] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Deleting Snapshot of the VM instance {{(pid=61998) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1063.147013] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-48a68111-f3a4-4fe8-955b-d9942d59a916 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.154907] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1063.154907] env[61998]: value = "task-1389019" [ 1063.154907] env[61998]: _type = "Task" [ 1063.154907] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.162724] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389019, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.541775] env[61998]: DEBUG nova.network.neutron [-] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.571009] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92a2bc3d-110b-438f-90ae-765b4d486ab0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.581504] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205590de-6322-4b53-a000-2c884addddfb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.609737] env[61998]: DEBUG nova.compute.manager [req-5e03efc8-f72c-4f1e-9f57-e410f44142cb req-9dfaaabb-0d56-4ed4-b756-913b96730210 service nova] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Detach interface failed, port_id=7fd5361b-02d8-4989-956f-b685ccb4431b, reason: Instance a5785859-2a23-478d-b156-1817fbdcb313 could not be found. 
{{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 1063.622634] env[61998]: DEBUG oslo_vmware.api [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389018, 'name': PowerOnVM_Task, 'duration_secs': 0.49542} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.622932] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1063.623171] env[61998]: INFO nova.compute.manager [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Took 11.49 seconds to spawn the instance on the hypervisor. [ 1063.623361] env[61998]: DEBUG nova.compute.manager [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1063.624786] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6729c2-e4e6-488e-ae6c-365b51186cca {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.666666] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389019, 'name': RemoveSnapshot_Task, 'duration_secs': 0.354735} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.667038] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Deleted Snapshot of the VM instance {{(pid=61998) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1063.667364] env[61998]: DEBUG nova.compute.manager [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1063.668295] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806139c6-c148-450c-a7b7-ed727efeb858 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.044892] env[61998]: INFO nova.compute.manager [-] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Took 1.21 seconds to deallocate network for instance. 
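The ReconfigVM_Task, Rename_Task and PowerOnVM_Task records above all follow the same oslo.vmware protocol: invoke_api() issues the SOAP call (the "Invoking VirtualMachine...Task" lines) and returns the task moref printed as (returnval){ value = "task-..." }, after which wait_for_task() polls it, emitting the "progress is 0%" lines from _poll_task until a "completed successfully" record reports the measured duration_secs. A minimal sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials and VM moref below are placeholders, not values from this log:

    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials -- assumptions for illustration.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10,
        task_poll_interval=0.5,  # interval between the "progress is N%" polls
    )

    # invoke_api() returns the task reference that the log prints as
    # (returnval){ value = "task-...", _type = "Task" }.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # hypothetical moref
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() loops until the task reports success, logging each
    # poll, then returns the task info; it raises if the task fails.
    task_info = session.wait_for_task(task_ref)

Every wait in this trace ends that way: the PowerOnVM_Task above completes in 0.49542s and the instance is then reported as spawned.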
[ 1064.144549] env[61998]: INFO nova.compute.manager [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Took 16.23 seconds to build instance. [ 1064.180058] env[61998]: INFO nova.compute.manager [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Shelve offloading [ 1064.552188] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.552488] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.552782] env[61998]: DEBUG nova.objects.instance [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lazy-loading 'resources' on Instance uuid a5785859-2a23-478d-b156-1817fbdcb313 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.646809] env[61998]: DEBUG oslo_concurrency.lockutils [None req-0654b848-f0e6-4911-ae42-0f244dcc0371 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "c16a959d-9c28-480b-aa62-51e7804ad0ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.741s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.683588] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.683906] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-feaa8016-b696-4800-9404-f3abaa6b2727 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.692335] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1064.692335] env[61998]: value = "task-1389020" [ 1064.692335] env[61998]: _type = "Task" [ 1064.692335] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.701243] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389020, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.105408] env[61998]: DEBUG nova.compute.manager [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Received event network-changed-723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1065.105634] env[61998]: DEBUG nova.compute.manager [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Refreshing instance network info cache due to event network-changed-723ec6af-ec74-4c82-ae7c-4795b74d6aad. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1065.105902] env[61998]: DEBUG oslo_concurrency.lockutils [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] Acquiring lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.106068] env[61998]: DEBUG oslo_concurrency.lockutils [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] Acquired lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.106262] env[61998]: DEBUG nova.network.neutron [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Refreshing network info cache for port 723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1065.148025] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc330f2-2726-4ddd-810a-d05b13c113ce {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.156417] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a022d7-17ff-4552-84cc-864c2152283e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.190078] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9adea3c5-d8e9-4864-a482-452092ed6aaa {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.200027] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ae501b-beb0-427f-b801-77616cc62ced {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.206842] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 
4c41a59a-59d4-4abd-b173-118e759fc19c] VM already powered off {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1065.207078] env[61998]: DEBUG nova.compute.manager [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1065.207833] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6bd38ca-40e7-4288-a72b-7667dfda178f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.217848] env[61998]: DEBUG nova.compute.provider_tree [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.224611] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.224787] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.224968] env[61998]: DEBUG nova.network.neutron [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1065.724275] env[61998]: DEBUG nova.scheduler.client.report [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1065.828240] env[61998]: DEBUG nova.network.neutron [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updated VIF entry in instance network info cache for port 723ec6af-ec74-4c82-ae7c-4795b74d6aad. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1065.828628] env[61998]: DEBUG nova.network.neutron [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updating instance_info_cache with network_info: [{"id": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "address": "fa:16:3e:23:8d:43", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723ec6af-ec", "ovs_interfaceid": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.115226] env[61998]: DEBUG nova.network.neutron [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updating instance_info_cache with network_info: [{"id": "33446d72-d352-428e-8a03-c36aaa61c776", "address": "fa:16:3e:fe:86:df", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33446d72-d3", "ovs_interfaceid": "33446d72-d352-428e-8a03-c36aaa61c776", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.120143] env[61998]: DEBUG nova.compute.manager [req-441d0b25-71e8-4e5a-ba80-813c81adc2f7 req-2e0cbfeb-e8e6-4317-a024-cdb36ad55d07 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] 
Received event network-changed-0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1066.120392] env[61998]: DEBUG nova.compute.manager [req-441d0b25-71e8-4e5a-ba80-813c81adc2f7 req-2e0cbfeb-e8e6-4317-a024-cdb36ad55d07 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Refreshing instance network info cache due to event network-changed-0891a509-232a-48e5-be4b-73a585033317. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1066.120648] env[61998]: DEBUG oslo_concurrency.lockutils [req-441d0b25-71e8-4e5a-ba80-813c81adc2f7 req-2e0cbfeb-e8e6-4317-a024-cdb36ad55d07 service nova] Acquiring lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1066.120814] env[61998]: DEBUG oslo_concurrency.lockutils [req-441d0b25-71e8-4e5a-ba80-813c81adc2f7 req-2e0cbfeb-e8e6-4317-a024-cdb36ad55d07 service nova] Acquired lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.120983] env[61998]: DEBUG nova.network.neutron [req-441d0b25-71e8-4e5a-ba80-813c81adc2f7 req-2e0cbfeb-e8e6-4317-a024-cdb36ad55d07 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Refreshing network info cache for port 0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1066.231284] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.679s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.256545] env[61998]: INFO nova.scheduler.client.report [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Deleted allocations for instance a5785859-2a23-478d-b156-1817fbdcb313 [ 1066.330983] env[61998]: DEBUG oslo_concurrency.lockutils [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] Releasing lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.331303] env[61998]: DEBUG nova.compute.manager [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Received event network-changed-0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1066.331480] env[61998]: DEBUG nova.compute.manager [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Refreshing instance network info cache due to event network-changed-0891a509-232a-48e5-be4b-73a585033317. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1066.331670] env[61998]: DEBUG oslo_concurrency.lockutils [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] Acquiring lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1066.619860] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.764767] env[61998]: DEBUG oslo_concurrency.lockutils [None req-4d2eb38d-4727-477e-b76b-ded0ac5d2714 tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "a5785859-2a23-478d-b156-1817fbdcb313" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.058s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.821653] env[61998]: DEBUG nova.network.neutron [req-441d0b25-71e8-4e5a-ba80-813c81adc2f7 req-2e0cbfeb-e8e6-4317-a024-cdb36ad55d07 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updated VIF entry in instance network info cache for port 0891a509-232a-48e5-be4b-73a585033317. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1066.822041] env[61998]: DEBUG nova.network.neutron [req-441d0b25-71e8-4e5a-ba80-813c81adc2f7 req-2e0cbfeb-e8e6-4317-a024-cdb36ad55d07 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updating instance_info_cache with network_info: [{"id": "0891a509-232a-48e5-be4b-73a585033317", "address": "fa:16:3e:9b:86:5e", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0891a509-23", "ovs_interfaceid": "0891a509-232a-48e5-be4b-73a585033317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.942019] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1066.942019] 
env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c063ef-d7bc-4d30-99b0-08b6f0706cf1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.950835] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1066.951457] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cca40ae7-a7ce-4316-95c6-79a107b9e4c3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.028342] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1067.028342] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1067.028342] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleting the datastore file [datastore2] 4c41a59a-59d4-4abd-b173-118e759fc19c {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1067.028342] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1812224-094a-420f-a7b4-99dc68fa9009 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.036426] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1067.036426] env[61998]: value = "task-1389022" [ 1067.036426] env[61998]: _type = "Task" [ 1067.036426] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.044862] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389022, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.324978] env[61998]: DEBUG oslo_concurrency.lockutils [req-441d0b25-71e8-4e5a-ba80-813c81adc2f7 req-2e0cbfeb-e8e6-4317-a024-cdb36ad55d07 service nova] Releasing lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1067.325453] env[61998]: DEBUG oslo_concurrency.lockutils [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] Acquired lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.325692] env[61998]: DEBUG nova.network.neutron [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Refreshing network info cache for port 0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1067.484403] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "cd7775be-954c-4117-b9a9-763bbfb325c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.484673] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "cd7775be-954c-4117-b9a9-763bbfb325c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.484887] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "cd7775be-954c-4117-b9a9-763bbfb325c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.485089] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "cd7775be-954c-4117-b9a9-763bbfb325c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.485264] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "cd7775be-954c-4117-b9a9-763bbfb325c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.487546] env[61998]: INFO nova.compute.manager [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d 
tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Terminating instance [ 1067.489504] env[61998]: DEBUG nova.compute.manager [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 1067.489695] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1067.490561] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fda395c-8456-42e7-abab-6cadf4f0b3a0 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.500778] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1067.501063] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bb2e466-2e8a-4ba4-96e8-c093270a18a2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.508035] env[61998]: DEBUG oslo_vmware.api [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1067.508035] env[61998]: value = "task-1389023" [ 1067.508035] env[61998]: _type = "Task" [ 1067.508035] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.517204] env[61998]: DEBUG oslo_vmware.api [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389023, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.547101] env[61998]: DEBUG oslo_vmware.api [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218163} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.547364] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1067.547560] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1067.547778] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1067.570780] env[61998]: INFO nova.scheduler.client.report [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleted allocations for instance 4c41a59a-59d4-4abd-b173-118e759fc19c [ 1067.743791] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.743980] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Cleaning up deleted instances {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11527}} [ 1068.019848] env[61998]: DEBUG oslo_vmware.api [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389023, 'name': PowerOffVM_Task, 'duration_secs': 0.321675} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.020768] env[61998]: DEBUG nova.network.neutron [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updated VIF entry in instance network info cache for port 0891a509-232a-48e5-be4b-73a585033317. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1068.021136] env[61998]: DEBUG nova.network.neutron [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updating instance_info_cache with network_info: [{"id": "0891a509-232a-48e5-be4b-73a585033317", "address": "fa:16:3e:9b:86:5e", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0891a509-23", "ovs_interfaceid": "0891a509-232a-48e5-be4b-73a585033317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.022825] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1068.023143] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1068.023406] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ec85281-9139-4b15-8753-60e06f77bc17 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.076027] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.076525] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.076898] env[61998]: DEBUG nova.objects.instance [None 
req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lazy-loading 'resources' on Instance uuid 4c41a59a-59d4-4abd-b173-118e759fc19c {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.089395] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1068.089620] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1068.089811] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Deleting the datastore file [datastore2] cd7775be-954c-4117-b9a9-763bbfb325c4 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1068.090095] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f0ab5e7-c1f6-4e37-9d86-f1bd01bbfff6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.096849] env[61998]: DEBUG oslo_vmware.api [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for the task: (returnval){ [ 1068.096849] env[61998]: value = "task-1389025" [ 1068.096849] env[61998]: _type = "Task" [ 1068.096849] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.105025] env[61998]: DEBUG oslo_vmware.api [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.150330] env[61998]: DEBUG nova.compute.manager [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Received event network-changed-723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1068.150330] env[61998]: DEBUG nova.compute.manager [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Refreshing instance network info cache due to event network-changed-723ec6af-ec74-4c82-ae7c-4795b74d6aad. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1068.150330] env[61998]: DEBUG oslo_concurrency.lockutils [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] Acquiring lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1068.150495] env[61998]: DEBUG oslo_concurrency.lockutils [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] Acquired lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.150666] env[61998]: DEBUG nova.network.neutron [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Refreshing network info cache for port 723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1068.255372] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] There are 38 instances to clean {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11536}} [ 1068.255648] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 79a15d76-acc3-465d-9ab7-fa61a894affd] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1068.524354] env[61998]: DEBUG oslo_concurrency.lockutils [req-6a83ab20-b71e-4ed5-9edc-ccd14064b00f req-e1099940-389a-410d-a7c3-ec7aa86c73d2 service nova] Releasing lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1068.579988] env[61998]: DEBUG nova.objects.instance [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lazy-loading 'numa_topology' on Instance uuid 4c41a59a-59d4-4abd-b173-118e759fc19c {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.607822] env[61998]: DEBUG oslo_vmware.api [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Task: {'id': task-1389025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137899} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.608112] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.608312] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1068.608502] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1068.608857] env[61998]: INFO nova.compute.manager [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1068.608946] env[61998]: DEBUG oslo.service.loopingcall [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1068.609121] env[61998]: DEBUG nova.compute.manager [-] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1068.609214] env[61998]: DEBUG nova.network.neutron [-] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1068.759258] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: a5785859-2a23-478d-b156-1817fbdcb313] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1068.863058] env[61998]: DEBUG nova.network.neutron [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updated VIF entry in instance network info cache for port 723ec6af-ec74-4c82-ae7c-4795b74d6aad. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1068.863454] env[61998]: DEBUG nova.network.neutron [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updating instance_info_cache with network_info: [{"id": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "address": "fa:16:3e:23:8d:43", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723ec6af-ec", "ovs_interfaceid": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.082572] env[61998]: DEBUG nova.objects.base [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Object Instance<4c41a59a-59d4-4abd-b173-118e759fc19c> lazy-loaded attributes: resources,numa_topology {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1069.134362] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950d4cc9-6ce1-42b8-af4e-e8f9c156d418 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.142801] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0f61ad-a108-4829-803e-f92a3031104d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.173511] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a63d59-9130-477f-9a9e-86b0c5895b94 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.193298] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc0b48e-0fc3-4685-b44d-8ec36e125e8b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.207398] env[61998]: DEBUG nova.compute.provider_tree [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1069.262116] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 9c8d8d6d-9b8a-4aa1-8673-4674d3d5d30b] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1069.324017] env[61998]: DEBUG nova.network.neutron [-] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.365667] env[61998]: DEBUG oslo_concurrency.lockutils [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] Releasing lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.365985] env[61998]: DEBUG nova.compute.manager [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Received event network-vif-unplugged-33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1069.366228] env[61998]: DEBUG oslo_concurrency.lockutils [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] Acquiring lock "4c41a59a-59d4-4abd-b173-118e759fc19c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.366453] env[61998]: DEBUG oslo_concurrency.lockutils [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.366664] env[61998]: DEBUG oslo_concurrency.lockutils [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.366793] env[61998]: DEBUG nova.compute.manager [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] No waiting events found dispatching network-vif-unplugged-33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1069.366967] env[61998]: WARNING nova.compute.manager [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Received unexpected event network-vif-unplugged-33446d72-d352-428e-8a03-c36aaa61c776 for instance with vm_state shelved_offloaded and task_state None.
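
The "Acquiring lock ... by ..." / "acquired" / "released" triplets above are oslo.concurrency lock traces: Nova serializes per-instance event dispatch and network-info-cache refreshes behind named locks. A minimal sketch of the two patterns, assuming a hypothetical cache dict and fetch function in place of Nova's real internals:

    from oslo_concurrency import lockutils

    _nw_info_cache = {}  # hypothetical stand-in for Nova's per-instance cache

    def refresh_network_info_cache(instance_uuid, port_id, fetch_nw_info):
        # Lock names like "refresh_cache-<uuid>" mirror the log; the context
        # manager emits the Acquiring/Acquired/Releasing DEBUG lines
        # (lockutils.py:310/313/331) seen above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            _nw_info_cache[instance_uuid] = fetch_nw_info(port_id)
            return _nw_info_cache[instance_uuid]

    # The 'by "<qualname>"' variants (lockutils.py:402/407/421) come from the
    # decorator form, as used for the "compute_resources" lock in this log:
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # serialized against all other resource-tracker updates
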
[ 1069.367151] env[61998]: DEBUG nova.compute.manager [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Received event network-changed-33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1069.367308] env[61998]: DEBUG nova.compute.manager [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Refreshing instance network info cache due to event network-changed-33446d72-d352-428e-8a03-c36aaa61c776. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1069.367495] env[61998]: DEBUG oslo_concurrency.lockutils [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] Acquiring lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.367634] env[61998]: DEBUG oslo_concurrency.lockutils [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] Acquired lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.367824] env[61998]: DEBUG nova.network.neutron [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Refreshing network info cache for port 33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1069.711032] env[61998]: DEBUG nova.scheduler.client.report [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1069.765323] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: eb45dbc3-a972-4004-9c9a-9bd908b34723] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1069.826833] env[61998]: INFO nova.compute.manager [-] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Took 1.22 seconds to deallocate network for instance. [ 1070.087950] env[61998]: DEBUG nova.network.neutron [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updated VIF entry in instance network info cache for port 33446d72-d352-428e-8a03-c36aaa61c776. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1070.088356] env[61998]: DEBUG nova.network.neutron [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updating instance_info_cache with network_info: [{"id": "33446d72-d352-428e-8a03-c36aaa61c776", "address": "fa:16:3e:fe:86:df", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap33446d72-d3", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.177775] env[61998]: DEBUG nova.compute.manager [req-d0a61a14-edd9-4968-8740-16ea61ee51e0 req-e9b19c26-af0f-406b-93b6-96f7d0b81162 service nova] [instance: cd7775be-954c-4117-b9a9-763bbfb325c4] Received event network-vif-deleted-4ac1ddd1-5732-4008-a0b6-d6679c3eb7c5 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1070.215748] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.139s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.268257] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: a909297e-ac29-4630-a54b-abd0b6f67893] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1070.333261] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.333552] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.333788] env[61998]: DEBUG nova.objects.instance [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 
tempest-ListServerFiltersTestJSON-223422133-project-member] Lazy-loading 'resources' on Instance uuid cd7775be-954c-4117-b9a9-763bbfb325c4 {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.470260] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "4c41a59a-59d4-4abd-b173-118e759fc19c" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.591586] env[61998]: DEBUG oslo_concurrency.lockutils [req-02034130-0b75-44af-b60a-e9498ec6fca1 req-394b1d2d-a764-4c5a-898d-37777b90ee56 service nova] Releasing lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.723413] env[61998]: DEBUG oslo_concurrency.lockutils [None req-a5bce2b6-ee02-4852-86b7-3a515e78641a tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 23.525s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.724599] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 0.255s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.724917] env[61998]: INFO nova.compute.manager [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Unshelving [ 1070.771342] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: f87f913f-9e6e-4d64-9fe1-0a1fc8564b46] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1070.896557] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7b5aa9-cb30-4efe-9942-a167404cb9e6 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.905345] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900f6285-daf1-4e67-b376-6852ce0c68fd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.936248] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ee632b-fb38-4f14-bc1d-04603f0f26ea {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.943733] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295a1d7a-db69-4519-81ce-1d0bcf2fa13b {{(pid=61998) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.957312] env[61998]: DEBUG nova.compute.provider_tree [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.275236] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 9de9cc49-7a81-4975-88df-5351125b180c] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1071.460861] env[61998]: DEBUG nova.scheduler.client.report [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1071.746521] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.780189] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 101d9d29-24b4-4c4d-bf7a-70abfd200be9] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1071.966904] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.633s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.969265] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.223s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.969503] env[61998]: DEBUG nova.objects.instance [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lazy-loading 'pci_requests' on Instance uuid 4c41a59a-59d4-4abd-b173-118e759fc19c {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1071.984284] env[61998]: INFO nova.scheduler.client.report [None 
req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Deleted allocations for instance cd7775be-954c-4117-b9a9-763bbfb325c4 [ 1072.283364] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 5789b2bc-a8c5-4986-bb53-7175cd566142] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1072.473748] env[61998]: DEBUG nova.objects.instance [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lazy-loading 'numa_topology' on Instance uuid 4c41a59a-59d4-4abd-b173-118e759fc19c {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1072.491477] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5cdf5a9c-3562-4cf0-9910-a37d72dc123d tempest-ListServerFiltersTestJSON-223422133 tempest-ListServerFiltersTestJSON-223422133-project-member] Lock "cd7775be-954c-4117-b9a9-763bbfb325c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.007s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.786601] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 4ab6f2f2-07c8-4477-a433-b6408cd919bc] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1072.976639] env[61998]: INFO nova.compute.claims [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.290689] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 89b6f21c-fc5a-40fe-a9a4-dee3de1a7ca1] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1073.794153] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: c924f793-852e-4f45-85b1-b1e3fdc5d60d] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1074.038732] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88a77b0-d4be-4fc8-b358-16c0efed2c6b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.047707] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d9fd81-19cc-43d2-8513-c43084dbc83f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.078592] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da508765-8b98-479d-85e0-ef17fdebb29c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.085798] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b03459-6d2a-4c16-bc8a-8b3d9a7a20f0 {{(pid=61998) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.099929] env[61998]: DEBUG nova.compute.provider_tree [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.298094] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 58626303-4d70-48bb-9aaf-1b54cef92a76] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1074.603097] env[61998]: DEBUG nova.scheduler.client.report [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1074.801493] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: a67aa33f-c7ba-44da-bdfa-e0a53a8538ad] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1075.108069] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.139s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.137820] env[61998]: INFO nova.network.neutron [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updating port 33446d72-d352-428e-8a03-c36aaa61c776 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1075.304962] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 34143cac-64e9-41fd-a970-b593d1472d92] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1075.809924] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 93df4e9a-29d2-4551-9bda-58b02163c116] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1076.313384] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 43ff4071-05f5-4e5c-a46d-1ca6c99809f0] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1076.536902] env[61998]: DEBUG nova.compute.manager 
[req-6bc27b34-6c45-4f18-b8f1-8ba2660a281e req-7efb8e42-1823-41a4-84d1-ea831a84d2b6 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Received event network-vif-plugged-33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1076.537171] env[61998]: DEBUG oslo_concurrency.lockutils [req-6bc27b34-6c45-4f18-b8f1-8ba2660a281e req-7efb8e42-1823-41a4-84d1-ea831a84d2b6 service nova] Acquiring lock "4c41a59a-59d4-4abd-b173-118e759fc19c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.537358] env[61998]: DEBUG oslo_concurrency.lockutils [req-6bc27b34-6c45-4f18-b8f1-8ba2660a281e req-7efb8e42-1823-41a4-84d1-ea831a84d2b6 service nova] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.537549] env[61998]: DEBUG oslo_concurrency.lockutils [req-6bc27b34-6c45-4f18-b8f1-8ba2660a281e req-7efb8e42-1823-41a4-84d1-ea831a84d2b6 service nova] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.537744] env[61998]: DEBUG nova.compute.manager [req-6bc27b34-6c45-4f18-b8f1-8ba2660a281e req-7efb8e42-1823-41a4-84d1-ea831a84d2b6 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] No waiting events found dispatching network-vif-plugged-33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1076.537921] env[61998]: WARNING nova.compute.manager [req-6bc27b34-6c45-4f18-b8f1-8ba2660a281e req-7efb8e42-1823-41a4-84d1-ea831a84d2b6 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Received unexpected event network-vif-plugged-33446d72-d352-428e-8a03-c36aaa61c776 for instance with vm_state shelved_offloaded and task_state spawning.
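
The CreateVM_Task exchange that follows ("Invoking Folder.CreateVM_Task ... Task: {'id': task-1389026, 'name': CreateVM_Task} progress is 0%") is oslo.vmware's standard asynchronous-task pattern: invoke the vSphere method, then block on the returned task until vCenter reports completion. A rough sketch using only public oslo.vmware calls; the connection values are placeholders, not taken from this run:

    from oslo_vmware import api

    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    def create_vm(vm_folder, config_spec, res_pool):
        # Returns a Task managed-object reference; the VM is not built yet.
        task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                                  config=config_spec, pool=res_pool)
        # wait_for_task() polls the task, producing the "_poll_task ...
        # progress is N%" DEBUG lines, and raises if the task errors out.
        return session.wait_for_task(task)
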
[ 1076.618947] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.619157] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.619347] env[61998]: DEBUG nova.network.neutron [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1076.816928] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 6e71b3c4-bac7-455c-94fd-2a9bc5128132] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1077.319664] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: aaee1558-f98b-4006-93b6-69434c78e79c] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1077.349285] env[61998]: DEBUG nova.network.neutron [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updating instance_info_cache with network_info: [{"id": "33446d72-d352-428e-8a03-c36aaa61c776", "address": "fa:16:3e:fe:86:df", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33446d72-d3", "ovs_interfaceid": "33446d72-d352-428e-8a03-c36aaa61c776", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.822893] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 2914460e-39e5-495b-96d8-b3580d0318d6] Instance has 
had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1077.851837] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.877153] env[61998]: DEBUG nova.virt.hardware [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='106de0571b321df4f4e315eb0ebab1b5',container_format='bare',created_at=2024-10-31T11:58:31Z,direct_url=,disk_format='vmdk',id=33dfbf2a-e277-437f-b9d9-945de9717988,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1258724767-shelved',owner='5dc1064c95484fd4afd1de8243b72d55',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2024-10-31T11:58:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1077.877482] env[61998]: DEBUG nova.virt.hardware [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1077.877680] env[61998]: DEBUG nova.virt.hardware [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1077.877876] env[61998]: DEBUG nova.virt.hardware [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1077.878043] env[61998]: DEBUG nova.virt.hardware [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1077.878201] env[61998]: DEBUG nova.virt.hardware [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1077.878410] env[61998]: DEBUG nova.virt.hardware [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1077.878575] env[61998]: DEBUG nova.virt.hardware [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1077.878747] env[61998]: DEBUG nova.virt.hardware [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1077.878909] env[61998]: DEBUG nova.virt.hardware [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1077.879097] env[61998]: DEBUG nova.virt.hardware [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1077.880187] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb34b1f-baf6-4747-8017-94863b87e575 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.889712] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7396eb-ace3-44d8-b0bd-649abc8a43ab {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.903007] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:86:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98e21102-8954-4f6f-b1e6-5d764a53aa22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33446d72-d352-428e-8a03-c36aaa61c776', 'vif_model': 'vmxnet3'}] {{(pid=61998) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1077.910260] env[61998]: DEBUG oslo.service.loopingcall [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1077.910479] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Creating VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1077.910680] env[61998]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78a6c0f1-6e72-4691-8419-41c677724438 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.931085] env[61998]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1077.931085] env[61998]: value = "task-1389026" [ 1077.931085] env[61998]: _type = "Task" [ 1077.931085] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.938995] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389026, 'name': CreateVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.326595] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 23265b26-7579-4514-a172-8cf2ec124ec6] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1078.441506] env[61998]: DEBUG oslo_vmware.api [-] Task: {'id': task-1389026, 'name': CreateVM_Task, 'duration_secs': 0.352487} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.441711] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Created VM on the ESX host {{(pid=61998) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1078.442293] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.442469] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.442862] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1078.443131] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ec15421-78a1-4358-8f3d-faa5a5ba1b20 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.448229] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 
tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1078.448229] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]52ce22f8-6979-7689-c564-f7b5137808bd" [ 1078.448229] env[61998]: _type = "Task" [ 1078.448229] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.456573] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': session[523a9ed6-b255-d82a-34e3-504b542807f6]52ce22f8-6979-7689-c564-f7b5137808bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.561168] env[61998]: DEBUG nova.compute.manager [req-2e234789-4879-4d63-9c95-98e6249c0da6 req-25215646-56e6-4485-8c2a-9fc537f5d910 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Received event network-changed-33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1078.561378] env[61998]: DEBUG nova.compute.manager [req-2e234789-4879-4d63-9c95-98e6249c0da6 req-25215646-56e6-4485-8c2a-9fc537f5d910 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Refreshing instance network info cache due to event network-changed-33446d72-d352-428e-8a03-c36aaa61c776. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1078.561599] env[61998]: DEBUG oslo_concurrency.lockutils [req-2e234789-4879-4d63-9c95-98e6249c0da6 req-25215646-56e6-4485-8c2a-9fc537f5d910 service nova] Acquiring lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.561749] env[61998]: DEBUG oslo_concurrency.lockutils [req-2e234789-4879-4d63-9c95-98e6249c0da6 req-25215646-56e6-4485-8c2a-9fc537f5d910 service nova] Acquired lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.561911] env[61998]: DEBUG nova.network.neutron [req-2e234789-4879-4d63-9c95-98e6249c0da6 req-25215646-56e6-4485-8c2a-9fc537f5d910 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Refreshing network info cache for port 33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1078.829445] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: b9c5feec-7bfd-470e-9833-b45403195e83] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1078.959172] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.959438] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 
4c41a59a-59d4-4abd-b173-118e759fc19c] Processing image 33dfbf2a-e277-437f-b9d9-945de9717988 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1078.959629] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988/33dfbf2a-e277-437f-b9d9-945de9717988.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.959785] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988/33dfbf2a-e277-437f-b9d9-945de9717988.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.959972] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1078.960250] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-faab1994-e3b5-4440-bb5e-d8defd7a545b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.968640] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1078.968820] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61998) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1078.969501] env[61998]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7063f7f-6324-4f38-9f04-d2667a12f693 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.974534] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1078.974534] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]5227a3fb-35e8-272c-2d3d-f06944c0e026" [ 1078.974534] env[61998]: _type = "Task" [ 1078.974534] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.988752] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Preparing fetch location {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1078.988974] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Fetch image to [datastore1] OSTACK_IMG_e61e53f0-6a7b-42c4-81bd-06a9a277ac05/OSTACK_IMG_e61e53f0-6a7b-42c4-81bd-06a9a277ac05.vmdk {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1078.989178] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Downloading stream optimized image 33dfbf2a-e277-437f-b9d9-945de9717988 to [datastore1] OSTACK_IMG_e61e53f0-6a7b-42c4-81bd-06a9a277ac05/OSTACK_IMG_e61e53f0-6a7b-42c4-81bd-06a9a277ac05.vmdk on the data store datastore1 as vApp {{(pid=61998) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1078.989350] env[61998]: DEBUG nova.virt.vmwareapi.images [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Downloading image file data 33dfbf2a-e277-437f-b9d9-945de9717988 to the ESX as VM named 'OSTACK_IMG_e61e53f0-6a7b-42c4-81bd-06a9a277ac05' {{(pid=61998) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1079.054017] env[61998]: DEBUG oslo_vmware.rw_handles [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1079.054017] env[61998]: value = "resgroup-9" [ 1079.054017] env[61998]: _type = "ResourcePool" [ 1079.054017] env[61998]: }. {{(pid=61998) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1079.054321] env[61998]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d6a69d3c-46b2-48b9-aa4d-f1451f799772 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.078909] env[61998]: DEBUG oslo_vmware.rw_handles [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lease: (returnval){ [ 1079.078909] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]524f295c-5201-574a-f8cf-ea1187c1d835" [ 1079.078909] env[61998]: _type = "HttpNfcLease" [ 1079.078909] env[61998]: } obtained for vApp import into resource pool (val){ [ 1079.078909] env[61998]: value = "resgroup-9" [ 1079.078909] env[61998]: _type = "ResourcePool" [ 1079.078909] env[61998]: }. 
{{(pid=61998) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1079.079224] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the lease: (returnval){ [ 1079.079224] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]524f295c-5201-574a-f8cf-ea1187c1d835" [ 1079.079224] env[61998]: _type = "HttpNfcLease" [ 1079.079224] env[61998]: } to be ready. {{(pid=61998) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1079.085363] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1079.085363] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]524f295c-5201-574a-f8cf-ea1187c1d835" [ 1079.085363] env[61998]: _type = "HttpNfcLease" [ 1079.085363] env[61998]: } is initializing. {{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1079.299231] env[61998]: DEBUG nova.network.neutron [req-2e234789-4879-4d63-9c95-98e6249c0da6 req-25215646-56e6-4485-8c2a-9fc537f5d910 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updated VIF entry in instance network info cache for port 33446d72-d352-428e-8a03-c36aaa61c776. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1079.299665] env[61998]: DEBUG nova.network.neutron [req-2e234789-4879-4d63-9c95-98e6249c0da6 req-25215646-56e6-4485-8c2a-9fc537f5d910 service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updating instance_info_cache with network_info: [{"id": "33446d72-d352-428e-8a03-c36aaa61c776", "address": "fa:16:3e:fe:86:df", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33446d72-d3", "ovs_interfaceid": "33446d72-d352-428e-8a03-c36aaa61c776", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.332755] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 1206c5c7-3eae-437b-9386-f3af937b8795] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1079.587403] env[61998]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1079.587403] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]524f295c-5201-574a-f8cf-ea1187c1d835" [ 1079.587403] 
env[61998]: _type = "HttpNfcLease" [ 1079.587403] env[61998]: } is ready. {{(pid=61998) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1079.587719] env[61998]: DEBUG oslo_vmware.rw_handles [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1079.587719] env[61998]: value = "session[523a9ed6-b255-d82a-34e3-504b542807f6]524f295c-5201-574a-f8cf-ea1187c1d835" [ 1079.587719] env[61998]: _type = "HttpNfcLease" [ 1079.587719] env[61998]: }. {{(pid=61998) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1079.588422] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76fad54-1f14-4940-b138-15650aeb4362 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.595862] env[61998]: DEBUG oslo_vmware.rw_handles [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525279da-66c1-4dd8-3277-2777d117c15b/disk-0.vmdk from lease info. {{(pid=61998) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1079.595918] env[61998]: DEBUG oslo_vmware.rw_handles [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525279da-66c1-4dd8-3277-2777d117c15b/disk-0.vmdk. 
{{(pid=61998) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1079.658427] env[61998]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-083200d2-e4bc-4913-b412-095037a5ee29 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.802410] env[61998]: DEBUG oslo_concurrency.lockutils [req-2e234789-4879-4d63-9c95-98e6249c0da6 req-25215646-56e6-4485-8c2a-9fc537f5d910 service nova] Releasing lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.837421] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 0b50a7cc-ef9c-4419-8e64-e0b56ac7de0e] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1080.341261] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 4ca7de74-3bcb-4da0-a2e1-573584467cc9] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1080.573293] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "interface-dde42f82-2616-43f0-a709-d6a63a63dd0d-301fa215-497f-46ff-ad22-1ea5c1e897d4" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.573591] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-dde42f82-2616-43f0-a709-d6a63a63dd0d-301fa215-497f-46ff-ad22-1ea5c1e897d4" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.574019] env[61998]: DEBUG nova.objects.instance [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'flavor' on Instance uuid dde42f82-2616-43f0-a709-d6a63a63dd0d {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.713803] env[61998]: DEBUG oslo_vmware.rw_handles [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Completed reading data from the image iterator. {{(pid=61998) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1080.714098] env[61998]: DEBUG oslo_vmware.rw_handles [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525279da-66c1-4dd8-3277-2777d117c15b/disk-0.vmdk. 
{{(pid=61998) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1080.715013] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662fce37-5777-4ef9-9490-e2bb203ef535 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.722735] env[61998]: DEBUG oslo_vmware.rw_handles [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525279da-66c1-4dd8-3277-2777d117c15b/disk-0.vmdk is in state: ready. {{(pid=61998) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1080.722923] env[61998]: DEBUG oslo_vmware.rw_handles [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525279da-66c1-4dd8-3277-2777d117c15b/disk-0.vmdk. {{(pid=61998) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1080.723241] env[61998]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-61ac8806-1755-48a9-8ce8-222682cd07f7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.845864] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: f3089d53-9c8f-4276-8e2e-0518cf29004b] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1080.906906] env[61998]: DEBUG oslo_vmware.rw_handles [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525279da-66c1-4dd8-3277-2777d117c15b/disk-0.vmdk. 
{{(pid=61998) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1080.907128] env[61998]: INFO nova.virt.vmwareapi.images [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Downloaded image file data 33dfbf2a-e277-437f-b9d9-945de9717988 [ 1080.907957] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79eacb5-dfe7-4841-9506-5e9d3f742252 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.923646] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42cbcd13-b6e4-4c5f-9680-616f1b05e073 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.955487] env[61998]: INFO nova.virt.vmwareapi.images [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] The imported VM was unregistered [ 1080.957917] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Caching image {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1080.958170] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Creating directory with path [datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988 {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1080.958481] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95020a7d-f521-41b8-9d3a-a4a62519761d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.969548] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Created directory with path [datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988 {{(pid=61998) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1080.969839] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_e61e53f0-6a7b-42c4-81bd-06a9a277ac05/OSTACK_IMG_e61e53f0-6a7b-42c4-81bd-06a9a277ac05.vmdk to [datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988/33dfbf2a-e277-437f-b9d9-945de9717988.vmdk. 
{{(pid=61998) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1080.970047] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c1382901-a4c7-4177-b92e-b5ec90858c85 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.976688] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1080.976688] env[61998]: value = "task-1389029" [ 1080.976688] env[61998]: _type = "Task" [ 1080.976688] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.984396] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389029, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.182830] env[61998]: DEBUG nova.objects.instance [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'pci_requests' on Instance uuid dde42f82-2616-43f0-a709-d6a63a63dd0d {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.349127] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: bcb05670-dc58-46be-a4a9-58a260e4132f] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1081.489056] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389029, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.685340] env[61998]: DEBUG nova.objects.base [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1081.685563] env[61998]: DEBUG nova.network.neutron [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1081.754919] env[61998]: DEBUG nova.policy [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3066202e35b643d1b6d3f2d8b4d724ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e479b6ac56f464fbc86574f776cd96c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 1081.853163] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: ac4a8463-91ba-4061-aa5d-1c72c4f532ce] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1081.993743] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389029, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.356444] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: f0dc302b-35c3-4ca2-bf06-6b0be8fe0e83] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1082.489751] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389029, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.859547] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: d780cbdc-8838-42bf-8736-bc2dd60e659c] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1082.990242] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389029, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.177468] env[61998]: DEBUG nova.compute.manager [req-58d562db-2517-4717-b4c1-095faffee2fd req-8c61597f-9c2b-456d-bea3-dfc0169355cb service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Received event network-vif-plugged-301fa215-497f-46ff-ad22-1ea5c1e897d4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1083.177879] env[61998]: DEBUG oslo_concurrency.lockutils [req-58d562db-2517-4717-b4c1-095faffee2fd req-8c61597f-9c2b-456d-bea3-dfc0169355cb service nova] Acquiring lock "dde42f82-2616-43f0-a709-d6a63a63dd0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.178369] env[61998]: DEBUG oslo_concurrency.lockutils [req-58d562db-2517-4717-b4c1-095faffee2fd req-8c61597f-9c2b-456d-bea3-dfc0169355cb service nova] Lock "dde42f82-2616-43f0-a709-d6a63a63dd0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.178583] env[61998]: DEBUG oslo_concurrency.lockutils [req-58d562db-2517-4717-b4c1-095faffee2fd req-8c61597f-9c2b-456d-bea3-dfc0169355cb service nova] Lock "dde42f82-2616-43f0-a709-d6a63a63dd0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.178890] env[61998]: DEBUG nova.compute.manager [req-58d562db-2517-4717-b4c1-095faffee2fd req-8c61597f-9c2b-456d-bea3-dfc0169355cb service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] No waiting events found dispatching network-vif-plugged-301fa215-497f-46ff-ad22-1ea5c1e897d4 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1083.179123] env[61998]: WARNING nova.compute.manager [req-58d562db-2517-4717-b4c1-095faffee2fd req-8c61597f-9c2b-456d-bea3-dfc0169355cb service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Received unexpected event network-vif-plugged-301fa215-497f-46ff-ad22-1ea5c1e897d4 for instance with vm_state active and task_state None. [ 1083.282375] env[61998]: DEBUG nova.network.neutron [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Successfully updated port: 301fa215-497f-46ff-ad22-1ea5c1e897d4 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1083.363027] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: c51f684b-84f0-42b3-acf9-9e8317b10cb6] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1083.489040] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389029, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.784750] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.784996] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.785149] env[61998]: DEBUG nova.network.neutron [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1083.867322] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: c84d15dc-0ef2-44e2-b579-104678a6bb07] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1083.989957] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389029, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.580211} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.990183] env[61998]: INFO nova.virt.vmwareapi.ds_util [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_e61e53f0-6a7b-42c4-81bd-06a9a277ac05/OSTACK_IMG_e61e53f0-6a7b-42c4-81bd-06a9a277ac05.vmdk to [datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988/33dfbf2a-e277-437f-b9d9-945de9717988.vmdk. 
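The records above trace the complete stream-optimized image fetch for instance 4c41a59a: ResourcePool.ImportVApp hands back an HttpNfcLease, Nova waits for the lease to leave "initializing", reads the disk-0.vmdk upload URL out of the lease info, streams the 31666688-byte image over HTTPS, completes the lease, unregisters the throwaway import VM, and relocates the VMDK into devstack-image-cache_base with a MoveVirtualDisk_Task. A minimal sketch of that lease workflow against the public oslo.vmware API, assuming an authenticated VMwareAPISession; rp_ref, folder_ref, import_spec, and the streaming step are illustrative placeholders, not Nova's actual code:

    from oslo_vmware import vim_util

    def upload_vmdk(session, rp_ref, folder_ref, import_spec, image_data):
        # ResourcePool.ImportVApp returns the HttpNfcLease seen in the log
        lease = session.invoke_api(session.vim, 'ImportVApp', rp_ref,
                                   spec=import_spec, folder=folder_ref)
        # Polls the lease until it transitions from "initializing" to "ready"
        session.wait_for_lease_ready(lease)
        lease_info = session.invoke_api(vim_util, 'get_object_property',
                                        session.vim, lease, 'info')
        # Sketch only: the real handler filters deviceUrl for the disk entry
        url = lease_info.deviceUrl[0].url
        # ... stream image_data to url over HTTPS, reporting progress ...
        session.invoke_api(session.vim, 'HttpNfcLeaseProgress', lease, percent=100)
        session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)

The MoveVirtualDisk_Task that follows is what turns the one-off import into a reusable cache entry keyed by image ID, so later spawns of 33dfbf2a-e277-437f-b9d9-945de9717988 can skip the download entirely.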
[ 1083.990374] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Cleaning up location [datastore1] OSTACK_IMG_e61e53f0-6a7b-42c4-81bd-06a9a277ac05 {{(pid=61998) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1083.990542] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_e61e53f0-6a7b-42c4-81bd-06a9a277ac05 {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1083.990786] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c608ddf5-cbc1-4ed2-bd84-5a7cea6a8a18 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.997051] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1083.997051] env[61998]: value = "task-1389030" [ 1083.997051] env[61998]: _type = "Task" [ 1083.997051] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.004115] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389030, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.320033] env[61998]: WARNING nova.network.neutron [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] 9b8c99a8-8481-43b6-bb09-1739b4f749c3 already exists in list: networks containing: ['9b8c99a8-8481-43b6-bb09-1739b4f749c3']. ignoring it [ 1084.370797] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: dadd9985-bca3-4207-927f-9490e0ae3f10] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1084.506040] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389030, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.038769} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.506313] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1084.506487] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988/33dfbf2a-e277-437f-b9d9-945de9717988.vmdk" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.506761] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988/33dfbf2a-e277-437f-b9d9-945de9717988.vmdk to [datastore1] 4c41a59a-59d4-4abd-b173-118e759fc19c/4c41a59a-59d4-4abd-b173-118e759fc19c.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1084.507020] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31170e8e-5a93-4fa3-a3e8-ec03b326050b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.513464] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1084.513464] env[61998]: value = "task-1389031" [ 1084.513464] env[61998]: _type = "Task" [ 1084.513464] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.520529] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389031, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.584100] env[61998]: DEBUG nova.network.neutron [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updating instance_info_cache with network_info: [{"id": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "address": "fa:16:3e:23:8d:43", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723ec6af-ec", "ovs_interfaceid": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "301fa215-497f-46ff-ad22-1ea5c1e897d4", "address": "fa:16:3e:31:d7:95", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap301fa215-49", "ovs_interfaceid": "301fa215-497f-46ff-ad22-1ea5c1e897d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.874404] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 2d0b199f-e0f1-42e0-afb5-e08602aebf01] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1085.026421] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389031, 'name': CopyVirtualDisk_Task} progress is 18%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.087097] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.087543] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.087777] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.088618] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa29d97-585c-4f72-a80d-b1de848c9549 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.105817] env[61998]: DEBUG nova.virt.hardware [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=<?>,min_ram=<?>,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1085.106086] env[61998]: DEBUG nova.virt.hardware [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1085.106286] env[61998]: DEBUG nova.virt.hardware [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1085.106517] env[61998]: DEBUG nova.virt.hardware [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1085.106681] env[61998]: DEBUG nova.virt.hardware [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image pref 0:0:0 {{(pid=61998) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1085.106836] env[61998]: DEBUG nova.virt.hardware [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1085.107070] env[61998]: DEBUG nova.virt.hardware [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1085.107271] env[61998]: DEBUG nova.virt.hardware [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1085.107498] env[61998]: DEBUG nova.virt.hardware [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1085.107732] env[61998]: DEBUG nova.virt.hardware [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1085.107866] env[61998]: DEBUG nova.virt.hardware [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1085.114522] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Reconfiguring VM to attach interface {{(pid=61998) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1085.114896] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c48d102d-ff20-47f6-862a-f49b42b976c7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.135611] env[61998]: DEBUG oslo_vmware.api [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1085.135611] env[61998]: value = "task-1389032" [ 1085.135611] env[61998]: _type = "Task" [ 1085.135611] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.147134] env[61998]: DEBUG oslo_vmware.api [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389032, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.205341] env[61998]: DEBUG nova.compute.manager [req-5c18376d-dfa1-4051-b4e9-891061ab0f38 req-79abd2e1-c260-4704-b7ec-070fbba77984 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Received event network-changed-301fa215-497f-46ff-ad22-1ea5c1e897d4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1085.205586] env[61998]: DEBUG nova.compute.manager [req-5c18376d-dfa1-4051-b4e9-891061ab0f38 req-79abd2e1-c260-4704-b7ec-070fbba77984 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Refreshing instance network info cache due to event network-changed-301fa215-497f-46ff-ad22-1ea5c1e897d4. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1085.205812] env[61998]: DEBUG oslo_concurrency.lockutils [req-5c18376d-dfa1-4051-b4e9-891061ab0f38 req-79abd2e1-c260-4704-b7ec-070fbba77984 service nova] Acquiring lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.205962] env[61998]: DEBUG oslo_concurrency.lockutils [req-5c18376d-dfa1-4051-b4e9-891061ab0f38 req-79abd2e1-c260-4704-b7ec-070fbba77984 service nova] Acquired lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.206144] env[61998]: DEBUG nova.network.neutron [req-5c18376d-dfa1-4051-b4e9-891061ab0f38 req-79abd2e1-c260-4704-b7ec-070fbba77984 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Refreshing network info cache for port 301fa215-497f-46ff-ad22-1ea5c1e897d4 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1085.378376] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: b3a3bb81-843b-4227-bebf-a8079f98c0f8] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1085.525620] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389031, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.647768] env[61998]: DEBUG oslo_vmware.api [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389032, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.882690] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: c55717f0-8ef2-4e55-b1cf-60f6faea9e5e] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1085.919102] env[61998]: DEBUG nova.network.neutron [req-5c18376d-dfa1-4051-b4e9-891061ab0f38 req-79abd2e1-c260-4704-b7ec-070fbba77984 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updated VIF entry in instance network info cache for port 301fa215-497f-46ff-ad22-1ea5c1e897d4. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1085.919564] env[61998]: DEBUG nova.network.neutron [req-5c18376d-dfa1-4051-b4e9-891061ab0f38 req-79abd2e1-c260-4704-b7ec-070fbba77984 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updating instance_info_cache with network_info: [{"id": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "address": "fa:16:3e:23:8d:43", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723ec6af-ec", "ovs_interfaceid": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "301fa215-497f-46ff-ad22-1ea5c1e897d4", "address": "fa:16:3e:31:d7:95", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap301fa215-49", "ovs_interfaceid": "301fa215-497f-46ff-ad22-1ea5c1e897d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
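Interleaved with that disk work, the AttachInterfacesTestJSON records show how Nova serializes interface hot-plug per (instance, port) pair: do_attach_interface runs under the lock "interface-<instance uuid>-<port id>", the failed network:attach_external_network policy check is informational here (the reader/member roles simply may not attach external networks), and the network-vif-plugged / network-changed events from Neutron drive cache refreshes under the matching "refresh_cache-<instance uuid>" lock. A rough sketch of the lock convention with oslo.concurrency; the function body is a placeholder, not Nova's actual attach path:

    from oslo_concurrency import lockutils

    def do_attach_interface(instance_uuid, port_id):
        # One lock per (instance, port) pair, matching the names logged above
        with lockutils.lock('interface-%s-%s' % (instance_uuid, port_id)):
            # reconfigure the VM, then let the network-changed event handler
            # refresh the instance network info cache
            pass

Keeping the lock name port-scoped is what lets the detach of one interface proceed while an attach on a different port of the same instance is still in flight, as the later detach_interface records illustrate.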
[ 1086.025516] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389031, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.149304] env[61998]: DEBUG oslo_vmware.api [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389032, 'name': ReconfigVM_Task, 'duration_secs': 0.600521} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.149822] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.150066] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Reconfigured VM to attach interface {{(pid=61998) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1086.386606] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: a7225abb-d8ea-49fc-85da-7791d9dde5bc] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1086.422655] env[61998]: DEBUG oslo_concurrency.lockutils [req-5c18376d-dfa1-4051-b4e9-891061ab0f38 req-79abd2e1-c260-4704-b7ec-070fbba77984 service nova] Releasing lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.526343] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389031, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.654777] env[61998]: DEBUG oslo_concurrency.lockutils [None req-5f94115a-c455-427c-8370-0cd956d21f11 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-dde42f82-2616-43f0-a709-d6a63a63dd0d-301fa215-497f-46ff-ad22-1ea5c1e897d4" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 6.081s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.890056] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 5eb786f1-7789-48a0-a04e-a4039e387f58] Instance has had 0 of 5 cleanup attempts {{(pid=61998) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11540}} [ 1087.027170] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389031, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.393286] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1087.393485] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Cleaning up deleted instances with incomplete migration {{(pid=61998) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11565}} [ 1087.527668] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389031, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.895834] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1088.032024] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389031, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.286730] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "interface-dde42f82-2616-43f0-a709-d6a63a63dd0d-301fa215-497f-46ff-ad22-1ea5c1e897d4" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.287308] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-dde42f82-2616-43f0-a709-d6a63a63dd0d-301fa215-497f-46ff-ad22-1ea5c1e897d4" acquired by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.529105] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389031, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.980929} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.529326] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/33dfbf2a-e277-437f-b9d9-945de9717988/33dfbf2a-e277-437f-b9d9-945de9717988.vmdk to [datastore1] 4c41a59a-59d4-4abd-b173-118e759fc19c/4c41a59a-59d4-4abd-b173-118e759fc19c.vmdk {{(pid=61998) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1088.530120] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e86547-e6d3-42c1-a14d-494bb255c0b5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.552279] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 4c41a59a-59d4-4abd-b173-118e759fc19c/4c41a59a-59d4-4abd-b173-118e759fc19c.vmdk or device None with type streamOptimized {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1088.552513] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b0fb54e-51f7-4d23-a5ce-3079f15b85bb {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.571567] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1088.571567] env[61998]: value = "task-1389033" [ 1088.571567] env[61998]: _type = "Task" [ 1088.571567] env[61998]: } to complete. 
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.578777] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389033, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.790079] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.790220] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.791149] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bcda79-c0eb-4fd4-949e-9877b2f63e13 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.808259] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad867e50-e9cb-470a-b03d-0a251870a84c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.834252] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Reconfiguring VM to detach interface {{(pid=61998) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1088.834538] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffba2cca-ad95-4a62-8661-a198e6910bed {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.852296] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1088.852296] env[61998]: value = "task-1389034" [ 1088.852296] env[61998]: _type = "Task" [ 1088.852296] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.859917] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.081298] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389033, 'name': ReconfigVM_Task, 'duration_secs': 0.259673} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.081632] env[61998]: DEBUG nova.virt.vmwareapi.volumeops [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 4c41a59a-59d4-4abd-b173-118e759fc19c/4c41a59a-59d4-4abd-b173-118e759fc19c.vmdk or device None with type streamOptimized {{(pid=61998) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1089.082232] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92c6daca-66fa-4d99-be3e-805b1fc224b4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.087822] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1089.087822] env[61998]: value = "task-1389035" [ 1089.087822] env[61998]: _type = "Task" [ 1089.087822] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.094985] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389035, 'name': Rename_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.362360] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.398018] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.398247] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.598018] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389035, 'name': Rename_Task, 'duration_secs': 0.128921} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.598316] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Powering on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1089.598573] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a218364-982e-48fe-9a5a-f7119b731749 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.604747] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){ [ 1089.604747] env[61998]: value = "task-1389036" [ 1089.604747] env[61998]: _type = "Task" [ 1089.604747] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.613324] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.862754] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.903418] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.903592] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Starting heal instance info cache {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10227}} [ 1090.115521] env[61998]: DEBUG oslo_vmware.api [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389036, 'name': PowerOnVM_Task, 'duration_secs': 0.432133} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.115859] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Powered on the VM {{(pid=61998) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1090.221063] env[61998]: DEBUG nova.compute.manager [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Checking state {{(pid=61998) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1090.221995] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c520ff9f-027c-4f8e-9fff-5e8c7fa92629 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.363936] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.738581] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3081a0a9-9962-4571-a68d-936d4f711ae9 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.014s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.864261] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.364353] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.439507] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.439678] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquired lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.439837] env[61998]: DEBUG nova.network.neutron [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Forcefully refreshing network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1091.865536] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.366298] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.664938] env[61998]: DEBUG nova.network.neutron [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updating instance_info_cache with network_info: [{"id": "33446d72-d352-428e-8a03-c36aaa61c776", "address": "fa:16:3e:fe:86:df", "network": {"id": "39b5ecbc-2969-4004-a012-b3dc237269f8", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-187938211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dc1064c95484fd4afd1de8243b72d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98e21102-8954-4f6f-b1e6-5d764a53aa22", "external-id": "nsx-vlan-transportzone-838", "segmentation_id": 838, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33446d72-d3", "ovs_interfaceid": "33446d72-d352-428e-8a03-c36aaa61c776", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.866286] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.168226] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Releasing lock "refresh_cache-4c41a59a-59d4-4abd-b173-118e759fc19c" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.168467] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updated the network info_cache for instance {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10298}} [ 1093.168686] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.168846] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.168993] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.169163] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.169309] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.169457] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.169582] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61998) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10846}} [ 1093.169724] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.366776] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.672867] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.673150] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.673327] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.673482] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61998) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1093.674398] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a6fc35-6701-46dc-92c7-38039d1f8d18 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.682316] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1a6d9e-12b4-4836-8b60-6afd6b5cae5f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.695472] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67152c1e-61e3-40b6-baf8-ce95b67b9a5c {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.701688] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3229e0d1-b330-44c9-bb2c-92a3ee822b44 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.729422] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180756MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61998) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1093.729570] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.729757] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61998) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.867903] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.368629] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.827766] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance dde42f82-2616-43f0-a709-d6a63a63dd0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.827932] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance c16a959d-9c28-480b-aa62-51e7804ad0ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.828075] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Instance 4c41a59a-59d4-4abd-b173-118e759fc19c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61998) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1094.828294] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1094.828452] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1094.843527] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Refreshing inventories for resource provider c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1094.856830] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Updating ProviderTree inventory for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1094.857017] env[61998]: DEBUG nova.compute.provider_tree [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Updating inventory in ProviderTree for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1094.867984] env[61998]: DEBUG oslo_vmware.api [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389034, 'name': ReconfigVM_Task, 'duration_secs': 5.732709} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.868723] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Refreshing aggregate associations for resource provider c8c34fc8-902a-460e-a93a-a1e887f55ddd, aggregates: None {{(pid=61998) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1094.870359] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.870565] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Reconfigured VM to detach interface {{(pid=61998) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1094.886783] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Refreshing trait associations for resource provider c8c34fc8-902a-460e-a93a-a1e887f55ddd, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64 {{(pid=61998) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1094.929064] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b0cbc1-1d57-47f9-a1b9-f27d5e4c5e31 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.936193] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7ed5ad-2b67-4635-96ba-bdb42bfc34f4 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.967428] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134ab4d9-935f-4ef5-ad17-3f782819e6c8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.974436] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637b7f27-74b0-434d-a9ce-6963d4d9e763 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.986785] env[61998]: DEBUG nova.compute.provider_tree [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.489844] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1095.994960] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61998) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1095.995176] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.265s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.191650] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.191879] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.192085] env[61998]: DEBUG nova.network.neutron [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1096.894452] env[61998]: DEBUG nova.compute.manager [req-2c570fe0-8f1c-4764-9da5-8640a5d79614 req-83ca02f3-317e-4a9b-ad45-74c54245c0c9 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Received event network-changed-723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1096.894738] env[61998]: DEBUG nova.compute.manager [req-2c570fe0-8f1c-4764-9da5-8640a5d79614 req-83ca02f3-317e-4a9b-ad45-74c54245c0c9 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Refreshing instance network info cache due to event network-changed-723ec6af-ec74-4c82-ae7c-4795b74d6aad. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1096.894873] env[61998]: DEBUG oslo_concurrency.lockutils [req-2c570fe0-8f1c-4764-9da5-8640a5d79614 req-83ca02f3-317e-4a9b-ad45-74c54245c0c9 service nova] Acquiring lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.966378] env[61998]: INFO nova.network.neutron [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Port 301fa215-497f-46ff-ad22-1ea5c1e897d4 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1096.966736] env[61998]: DEBUG nova.network.neutron [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updating instance_info_cache with network_info: [{"id": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "address": "fa:16:3e:23:8d:43", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723ec6af-ec", "ovs_interfaceid": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.469167] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1097.472054] env[61998]: DEBUG oslo_concurrency.lockutils [req-2c570fe0-8f1c-4764-9da5-8640a5d79614 req-83ca02f3-317e-4a9b-ad45-74c54245c0c9 service nova] Acquired lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.472054] env[61998]: DEBUG nova.network.neutron [req-2c570fe0-8f1c-4764-9da5-8640a5d79614 req-83ca02f3-317e-4a9b-ad45-74c54245c0c9 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Refreshing network info cache for port 723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1097.597771] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "interface-c16a959d-9c28-480b-aa62-51e7804ad0ed-301fa215-497f-46ff-ad22-1ea5c1e897d4" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.598038] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-c16a959d-9c28-480b-aa62-51e7804ad0ed-301fa215-497f-46ff-ad22-1ea5c1e897d4" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 
0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.598468] env[61998]: DEBUG nova.objects.instance [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'flavor' on Instance uuid c16a959d-9c28-480b-aa62-51e7804ad0ed {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1097.974179] env[61998]: DEBUG oslo_concurrency.lockutils [None req-cc6df44a-e36c-4465-beb0-5a5caf8fab81 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-dde42f82-2616-43f0-a709-d6a63a63dd0d-301fa215-497f-46ff-ad22-1ea5c1e897d4" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.687s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.167756] env[61998]: DEBUG nova.network.neutron [req-2c570fe0-8f1c-4764-9da5-8640a5d79614 req-83ca02f3-317e-4a9b-ad45-74c54245c0c9 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updated VIF entry in instance network info cache for port 723ec6af-ec74-4c82-ae7c-4795b74d6aad. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1098.168132] env[61998]: DEBUG nova.network.neutron [req-2c570fe0-8f1c-4764-9da5-8640a5d79614 req-83ca02f3-317e-4a9b-ad45-74c54245c0c9 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updating instance_info_cache with network_info: [{"id": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "address": "fa:16:3e:23:8d:43", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap723ec6af-ec", "ovs_interfaceid": "723ec6af-ec74-4c82-ae7c-4795b74d6aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.194795] env[61998]: DEBUG nova.objects.instance [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'pci_requests' on Instance uuid c16a959d-9c28-480b-aa62-51e7804ad0ed {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1098.671165] env[61998]: DEBUG oslo_concurrency.lockutils [req-2c570fe0-8f1c-4764-9da5-8640a5d79614 req-83ca02f3-317e-4a9b-ad45-74c54245c0c9 service nova] Releasing lock "refresh_cache-dde42f82-2616-43f0-a709-d6a63a63dd0d" {{(pid=61998) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.697216] env[61998]: DEBUG nova.objects.base [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61998) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1098.697451] env[61998]: DEBUG nova.network.neutron [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] allocate_for_instance() {{(pid=61998) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1098.760012] env[61998]: DEBUG nova.policy [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3066202e35b643d1b6d3f2d8b4d724ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e479b6ac56f464fbc86574f776cd96c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61998) authorize /opt/stack/nova/nova/policy.py:201}} [ 1098.921700] env[61998]: DEBUG nova.compute.manager [req-a941b642-18c6-4c57-aeec-caa2a1aeff18 req-d6519db9-d18f-440b-b4ae-656e0ae34987 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Received event network-changed-0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1098.921873] env[61998]: DEBUG nova.compute.manager [req-a941b642-18c6-4c57-aeec-caa2a1aeff18 req-d6519db9-d18f-440b-b4ae-656e0ae34987 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Refreshing instance network info cache due to event network-changed-0891a509-232a-48e5-be4b-73a585033317. 
{{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1098.922616] env[61998]: DEBUG oslo_concurrency.lockutils [req-a941b642-18c6-4c57-aeec-caa2a1aeff18 req-d6519db9-d18f-440b-b4ae-656e0ae34987 service nova] Acquiring lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.922781] env[61998]: DEBUG oslo_concurrency.lockutils [req-a941b642-18c6-4c57-aeec-caa2a1aeff18 req-d6519db9-d18f-440b-b4ae-656e0ae34987 service nova] Acquired lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.922952] env[61998]: DEBUG nova.network.neutron [req-a941b642-18c6-4c57-aeec-caa2a1aeff18 req-d6519db9-d18f-440b-b4ae-656e0ae34987 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Refreshing network info cache for port 0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1099.852854] env[61998]: DEBUG nova.network.neutron [req-a941b642-18c6-4c57-aeec-caa2a1aeff18 req-d6519db9-d18f-440b-b4ae-656e0ae34987 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updated VIF entry in instance network info cache for port 0891a509-232a-48e5-be4b-73a585033317. {{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1099.853250] env[61998]: DEBUG nova.network.neutron [req-a941b642-18c6-4c57-aeec-caa2a1aeff18 req-d6519db9-d18f-440b-b4ae-656e0ae34987 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updating instance_info_cache with network_info: [{"id": "0891a509-232a-48e5-be4b-73a585033317", "address": "fa:16:3e:9b:86:5e", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0891a509-23", "ovs_interfaceid": "0891a509-232a-48e5-be4b-73a585033317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.130714] env[61998]: DEBUG nova.compute.manager [req-5ca255da-64dc-4fd4-9505-4ceb7aaa8a5c req-cf065f59-3441-45d9-bc0d-e6e36613e471 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Received event network-vif-plugged-301fa215-497f-46ff-ad22-1ea5c1e897d4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1100.130975] env[61998]: DEBUG oslo_concurrency.lockutils [req-5ca255da-64dc-4fd4-9505-4ceb7aaa8a5c 
req-cf065f59-3441-45d9-bc0d-e6e36613e471 service nova] Acquiring lock "c16a959d-9c28-480b-aa62-51e7804ad0ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.131209] env[61998]: DEBUG oslo_concurrency.lockutils [req-5ca255da-64dc-4fd4-9505-4ceb7aaa8a5c req-cf065f59-3441-45d9-bc0d-e6e36613e471 service nova] Lock "c16a959d-9c28-480b-aa62-51e7804ad0ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.131379] env[61998]: DEBUG oslo_concurrency.lockutils [req-5ca255da-64dc-4fd4-9505-4ceb7aaa8a5c req-cf065f59-3441-45d9-bc0d-e6e36613e471 service nova] Lock "c16a959d-9c28-480b-aa62-51e7804ad0ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.131550] env[61998]: DEBUG nova.compute.manager [req-5ca255da-64dc-4fd4-9505-4ceb7aaa8a5c req-cf065f59-3441-45d9-bc0d-e6e36613e471 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] No waiting events found dispatching network-vif-plugged-301fa215-497f-46ff-ad22-1ea5c1e897d4 {{(pid=61998) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1100.131801] env[61998]: WARNING nova.compute.manager [req-5ca255da-64dc-4fd4-9505-4ceb7aaa8a5c req-cf065f59-3441-45d9-bc0d-e6e36613e471 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Received unexpected event network-vif-plugged-301fa215-497f-46ff-ad22-1ea5c1e897d4 for instance with vm_state active and task_state None. 
[ 1100.211866] env[61998]: DEBUG nova.network.neutron [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Successfully updated port: 301fa215-497f-46ff-ad22-1ea5c1e897d4 {{(pid=61998) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1100.356224] env[61998]: DEBUG oslo_concurrency.lockutils [req-a941b642-18c6-4c57-aeec-caa2a1aeff18 req-d6519db9-d18f-440b-b4ae-656e0ae34987 service nova] Releasing lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.715135] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.715312] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.715464] env[61998]: DEBUG nova.network.neutron [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1101.248568] env[61998]: WARNING nova.network.neutron [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] 9b8c99a8-8481-43b6-bb09-1739b4f749c3 already exists in list: networks containing: ['9b8c99a8-8481-43b6-bb09-1739b4f749c3']. 
ignoring it [ 1101.497033] env[61998]: DEBUG nova.network.neutron [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updating instance_info_cache with network_info: [{"id": "0891a509-232a-48e5-be4b-73a585033317", "address": "fa:16:3e:9b:86:5e", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0891a509-23", "ovs_interfaceid": "0891a509-232a-48e5-be4b-73a585033317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "301fa215-497f-46ff-ad22-1ea5c1e897d4", "address": "fa:16:3e:31:d7:95", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap301fa215-49", "ovs_interfaceid": "301fa215-497f-46ff-ad22-1ea5c1e897d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.999881] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.000546] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.000819] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.001779] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33897e8-6d98-4a5c-893e-99320b751da7 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.018749] env[61998]: DEBUG nova.virt.hardware [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-31T11:48:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1102.018969] env[61998]: DEBUG nova.virt.hardware [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1102.019144] env[61998]: DEBUG nova.virt.hardware [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image limits 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1102.019336] env[61998]: DEBUG nova.virt.hardware [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Flavor pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1102.019495] env[61998]: DEBUG nova.virt.hardware [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Image pref 0:0:0 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1102.019638] env[61998]: DEBUG nova.virt.hardware [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61998) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1102.019848] env[61998]: DEBUG nova.virt.hardware [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1102.020016] env[61998]: DEBUG nova.virt.hardware [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1102.020197] env[61998]: DEBUG nova.virt.hardware [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Got 1 possible topologies {{(pid=61998) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1102.020369] env[61998]: DEBUG nova.virt.hardware [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1102.020545] env[61998]: DEBUG nova.virt.hardware [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61998) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1102.026734] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Reconfiguring VM to attach interface {{(pid=61998) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1102.027321] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-196049ec-181a-4d77-a882-406210b6f4de {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.043307] env[61998]: DEBUG oslo_vmware.api [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1102.043307] env[61998]: value = "task-1389037" [ 1102.043307] env[61998]: _type = "Task" [ 1102.043307] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.050584] env[61998]: DEBUG oslo_vmware.api [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389037, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.158304] env[61998]: DEBUG nova.compute.manager [req-415bfbd1-41fd-4c4d-b6d6-3105042d3042 req-f02be26f-d2da-46c7-abef-566359ee3c9b service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Received event network-changed-301fa215-497f-46ff-ad22-1ea5c1e897d4 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1102.158574] env[61998]: DEBUG nova.compute.manager [req-415bfbd1-41fd-4c4d-b6d6-3105042d3042 req-f02be26f-d2da-46c7-abef-566359ee3c9b service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Refreshing instance network info cache due to event network-changed-301fa215-497f-46ff-ad22-1ea5c1e897d4. {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11435}} [ 1102.158713] env[61998]: DEBUG oslo_concurrency.lockutils [req-415bfbd1-41fd-4c4d-b6d6-3105042d3042 req-f02be26f-d2da-46c7-abef-566359ee3c9b service nova] Acquiring lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.158861] env[61998]: DEBUG oslo_concurrency.lockutils [req-415bfbd1-41fd-4c4d-b6d6-3105042d3042 req-f02be26f-d2da-46c7-abef-566359ee3c9b service nova] Acquired lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.159040] env[61998]: DEBUG nova.network.neutron [req-415bfbd1-41fd-4c4d-b6d6-3105042d3042 req-f02be26f-d2da-46c7-abef-566359ee3c9b service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Refreshing network info cache for port 301fa215-497f-46ff-ad22-1ea5c1e897d4 {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1102.553597] env[61998]: DEBUG oslo_vmware.api [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.840465] env[61998]: DEBUG nova.network.neutron [req-415bfbd1-41fd-4c4d-b6d6-3105042d3042 req-f02be26f-d2da-46c7-abef-566359ee3c9b service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updated VIF entry in instance network info cache for port 301fa215-497f-46ff-ad22-1ea5c1e897d4. 
{{(pid=61998) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1102.840908] env[61998]: DEBUG nova.network.neutron [req-415bfbd1-41fd-4c4d-b6d6-3105042d3042 req-f02be26f-d2da-46c7-abef-566359ee3c9b service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updating instance_info_cache with network_info: [{"id": "0891a509-232a-48e5-be4b-73a585033317", "address": "fa:16:3e:9b:86:5e", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0891a509-23", "ovs_interfaceid": "0891a509-232a-48e5-be4b-73a585033317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "301fa215-497f-46ff-ad22-1ea5c1e897d4", "address": "fa:16:3e:31:d7:95", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap301fa215-49", "ovs_interfaceid": "301fa215-497f-46ff-ad22-1ea5c1e897d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.054457] env[61998]: DEBUG oslo_vmware.api [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389037, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.344383] env[61998]: DEBUG oslo_concurrency.lockutils [req-415bfbd1-41fd-4c4d-b6d6-3105042d3042 req-f02be26f-d2da-46c7-abef-566359ee3c9b service nova] Releasing lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.555248] env[61998]: DEBUG oslo_vmware.api [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.059886] env[61998]: DEBUG oslo_vmware.api [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389037, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.557652] env[61998]: DEBUG oslo_vmware.api [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389037, 'name': ReconfigVM_Task, 'duration_secs': 2.022351} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.558229] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1104.558466] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Reconfigured VM to attach interface {{(pid=61998) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1105.063639] env[61998]: DEBUG oslo_concurrency.lockutils [None req-61bead03-6f6a-4024-9b06-151aca74ff2d tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-c16a959d-9c28-480b-aa62-51e7804ad0ed-301fa215-497f-46ff-ad22-1ea5c1e897d4" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.465s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.304144] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "interface-c16a959d-9c28-480b-aa62-51e7804ad0ed-301fa215-497f-46ff-ad22-1ea5c1e897d4" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.304670] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock 
"interface-c16a959d-9c28-480b-aa62-51e7804ad0ed-301fa215-497f-46ff-ad22-1ea5c1e897d4" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.807836] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.808062] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.808953] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58d974a-54d1-4b37-a734-c6d6bb5482f9 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.827219] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e542dae2-338a-4797-9dce-8cc37d31fedc {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.853027] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Reconfiguring VM to detach interface {{(pid=61998) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1106.853027] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d68b70e-485b-4a42-995f-5c3fc0bd4124 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.870698] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1106.870698] env[61998]: value = "task-1389038" [ 1106.870698] env[61998]: _type = "Task" [ 1106.870698] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.878239] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.381720] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.881102] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.382150] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.882471] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.385273] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.883923] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.385268] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.885328] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.385155] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.886041] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.386878] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.888275] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.389334] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.890807] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.390886] env[61998]: DEBUG oslo_vmware.api [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389038, 'name': ReconfigVM_Task, 'duration_secs': 7.124045} completed successfully. 
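Each "progress is N%" record above is one iteration of oslo.vmware's task poll loop: read the task's info, report progress, sleep, and repeat until vCenter marks the task success or error. A generic sketch of that contract; get_task_info is a stand-in callable for the real property read, not oslo.vmware's internal API:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vSphere-style task until it succeeds or errors out."""
        start = time.monotonic()
        while True:
            info = get_task_info()   # object with .state, .progress, .error
            if info.state == "success":
                return time.monotonic() - start   # cf. 'duration_secs' above
            if info.state == "error":
                raise RuntimeError(info.error)
            # 'queued' / 'running': report progress and try again.
            print("progress is %s%%" % (info.progress or 0))
            time.sleep(poll_interval)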
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.391148] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.391367] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Reconfigured VM to detach interface {{(pid=61998) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1115.667808] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.668232] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquired lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.668232] env[61998]: DEBUG nova.network.neutron [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Building network info cache for instance {{(pid=61998) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1115.699978] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "c16a959d-9c28-480b-aa62-51e7804ad0ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.700219] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "c16a959d-9c28-480b-aa62-51e7804ad0ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.700432] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "c16a959d-9c28-480b-aa62-51e7804ad0ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.700613] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 
tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "c16a959d-9c28-480b-aa62-51e7804ad0ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.700783] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "c16a959d-9c28-480b-aa62-51e7804ad0ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.703517] env[61998]: INFO nova.compute.manager [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Terminating instance [ 1115.705179] env[61998]: DEBUG nova.compute.manager [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 1115.705378] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1115.706223] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ec2f0c-4e34-48c0-8b89-03c0ccb0ce99 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.714525] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1115.714742] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b1bb571-f62b-445a-a4f6-7888710d0267 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.721439] env[61998]: DEBUG oslo_vmware.api [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1115.721439] env[61998]: value = "task-1389039" [ 1115.721439] env[61998]: _type = "Task" [ 1115.721439] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.728658] env[61998]: DEBUG oslo_vmware.api [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389039, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.233130] env[61998]: DEBUG oslo_vmware.api [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389039, 'name': PowerOffVM_Task, 'duration_secs': 0.134405} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.233406] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1116.233660] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1116.233856] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b03cec3-adc9-47a0-8f13-e394c46e3c0f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.297034] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1116.297279] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1116.297463] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Deleting the datastore file [datastore1] c16a959d-9c28-480b-aa62-51e7804ad0ed {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1116.297772] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55aafc5e-a4d6-4ca6-aa29-622c6048a0a8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.304119] env[61998]: DEBUG oslo_vmware.api [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1116.304119] env[61998]: value = "task-1389041" [ 1116.304119] env[61998]: _type = "Task" [ 1116.304119] env[61998]: } to complete. 
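The teardown above is strictly ordered: power the VM off, unregister it from the vCenter inventory, then delete its datastore directory, each step backed by the vCenter call named in the log (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task). In outline, with the three helpers as placeholders for those calls rather than real Nova functions:

    def power_off(session, vm_ref): ...              # placeholder: PowerOffVM_Task
    def unregister(session, vm_ref): ...             # placeholder: UnregisterVM
    def delete_datastore_files(session, path): ...   # placeholder: DeleteDatastoreFile_Task

    def destroy_vm(session, vm_ref, datastore_path):
        # 1. A powered-on VM cannot be unregistered, so power off first.
        power_off(session, vm_ref)
        # 2. Unregistering is synchronous and only removes the inventory entry.
        unregister(session, vm_ref)
        # 3. Deleting the datastore path reclaims the backing files, e.g.
        #    "[datastore1] c16a959d-9c28-480b-aa62-51e7804ad0ed".
        delete_datastore_files(session, datastore_path)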
{{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.311658] env[61998]: DEBUG oslo_vmware.api [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389041, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.383961] env[61998]: INFO nova.network.neutron [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Port 301fa215-497f-46ff-ad22-1ea5c1e897d4 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1116.384350] env[61998]: DEBUG nova.network.neutron [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updating instance_info_cache with network_info: [{"id": "0891a509-232a-48e5-be4b-73a585033317", "address": "fa:16:3e:9b:86:5e", "network": {"id": "9b8c99a8-8481-43b6-bb09-1739b4f749c3", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1264689805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e479b6ac56f464fbc86574f776cd96c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0891a509-23", "ovs_interfaceid": "0891a509-232a-48e5-be4b-73a585033317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.814186] env[61998]: DEBUG oslo_vmware.api [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389041, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146462} completed successfully. 
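The INFO record above shows the cache healing itself: while rebuilding, Nova finds that port 301fa215-... is no longer bound to the instance in Neutron and drops that VIF before writing the cache back, which is why the rewritten network_info now lists only port 0891a509-.... Conceptually the pruning is a filter; prune_stale_vifs is an illustrative name:

    def prune_stale_vifs(cached_nw_info, live_port_ids):
        """Keep only cached VIFs whose Neutron port still exists."""
        return [vif for vif in cached_nw_info if vif["id"] in live_port_ids]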
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.814613] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1116.814613] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1116.814741] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1116.814915] env[61998]: INFO nova.compute.manager [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1116.815167] env[61998]: DEBUG oslo.service.loopingcall [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
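The "Waiting for function ... _deallocate_network_with_retries to return" record is oslo.service's loopingcall helper: it re-invokes the wrapped function on a timer until the function signals completion by raising LoopingCallDone. Nova uses a backoff variant for network deallocation; the fixed-interval class below shows the same contract in a minimal runnable sketch with a faked deallocation body:

    from oslo_service import loopingcall

    attempts = {"n": 0}

    def _deallocate_with_retries():
        attempts["n"] += 1
        if attempts["n"] < 3:
            return                      # a plain return keeps the loop running
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=1).wait()   # blocks until LoopingCallDone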
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1116.815360] env[61998]: DEBUG nova.compute.manager [-] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1116.815453] env[61998]: DEBUG nova.network.neutron [-] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1116.886825] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Releasing lock "refresh_cache-c16a959d-9c28-480b-aa62-51e7804ad0ed" {{(pid=61998) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.391609] env[61998]: DEBUG oslo_concurrency.lockutils [None req-3996822c-6d7d-4b6a-a27d-8b41988d2360 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "interface-c16a959d-9c28-480b-aa62-51e7804ad0ed-301fa215-497f-46ff-ad22-1ea5c1e897d4" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.087s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.427494] env[61998]: DEBUG nova.compute.manager [req-fbe5d527-402b-4fd3-a47e-09e6e672b9bf req-9cb6eff2-3ccd-4dcb-b0a2-f3961ccfc427 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Received event network-vif-deleted-0891a509-232a-48e5-be4b-73a585033317 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1117.427762] env[61998]: INFO nova.compute.manager [req-fbe5d527-402b-4fd3-a47e-09e6e672b9bf req-9cb6eff2-3ccd-4dcb-b0a2-f3961ccfc427 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Neutron deleted interface 0891a509-232a-48e5-be4b-73a585033317; detaching it from the instance and deleting it from the info cache [ 1117.427964] env[61998]: DEBUG nova.network.neutron [req-fbe5d527-402b-4fd3-a47e-09e6e672b9bf req-9cb6eff2-3ccd-4dcb-b0a2-f3961ccfc427 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.902266] env[61998]: DEBUG nova.network.neutron [-] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.930804] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25051cae-ed5f-4240-8681-3246ca6d384b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.940627] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7670c3-889b-4c1d-88d4-8dbdc4955140 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.965476] env[61998]: DEBUG nova.compute.manager [req-fbe5d527-402b-4fd3-a47e-09e6e672b9bf req-9cb6eff2-3ccd-4dcb-b0a2-f3961ccfc427 service nova] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Detach interface failed, port_id=0891a509-232a-48e5-be4b-73a585033317, reason: Instance 
c16a959d-9c28-480b-aa62-51e7804ad0ed could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 1118.405070] env[61998]: INFO nova.compute.manager [-] [instance: c16a959d-9c28-480b-aa62-51e7804ad0ed] Took 1.59 seconds to deallocate network for instance. [ 1118.911674] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.911977] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.912203] env[61998]: DEBUG nova.objects.instance [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'resources' on Instance uuid c16a959d-9c28-480b-aa62-51e7804ad0ed {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.468481] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f827ecf1-0d56-42d7-af6f-3d5318e951a3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.477356] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54725be7-7454-4609-b4fb-913e0b488075 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.506542] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1a3dd0-351b-47e5-af66-04475cd01fb1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.513382] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1680b2c3-906e-4a72-bed8-97063fcff09d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.526072] env[61998]: DEBUG nova.compute.provider_tree [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1120.029206] env[61998]: DEBUG nova.scheduler.client.report [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1120.534665] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.623s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.555037] env[61998]: INFO nova.scheduler.client.report [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Deleted allocations for instance c16a959d-9c28-480b-aa62-51e7804ad0ed [ 1121.065606] env[61998]: DEBUG oslo_concurrency.lockutils [None req-d52a7a83-6e14-4447-ac67-74c0dc8e89f7 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "c16a959d-9c28-480b-aa62-51e7804ad0ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.365s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.277054] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "dde42f82-2616-43f0-a709-d6a63a63dd0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.277054] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "dde42f82-2616-43f0-a709-d6a63a63dd0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.277054] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "dde42f82-2616-43f0-a709-d6a63a63dd0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.277399] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "dde42f82-2616-43f0-a709-d6a63a63dd0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.277399] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "dde42f82-2616-43f0-a709-d6a63a63dd0d-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.279463] env[61998]: INFO nova.compute.manager [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Terminating instance [ 1121.281271] env[61998]: DEBUG nova.compute.manager [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}} [ 1121.281466] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1121.282314] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad578bb9-add9-4dbc-8fbf-439de410528d {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.290412] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1121.290635] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b549da4-2e77-4481-bce3-d5030a5af023 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.296932] env[61998]: DEBUG oslo_vmware.api [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1121.296932] env[61998]: value = "task-1389042" [ 1121.296932] env[61998]: _type = "Task" [ 1121.296932] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.304204] env[61998]: DEBUG oslo_vmware.api [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389042, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.806654] env[61998]: DEBUG oslo_vmware.api [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389042, 'name': PowerOffVM_Task, 'duration_secs': 0.197967} completed successfully. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.806948] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1121.807163] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1121.807441] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f428fbf-6830-4c8c-a24d-e4967cdabab2 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.870614] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1121.870831] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Deleting contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1121.871033] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Deleting the datastore file [datastore2] dde42f82-2616-43f0-a709-d6a63a63dd0d {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1121.871310] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-875b751d-18e1-408d-b70f-2095cbbb3b80 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.877861] env[61998]: DEBUG oslo_vmware.api [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for the task: (returnval){ [ 1121.877861] env[61998]: value = "task-1389044" [ 1121.877861] env[61998]: _type = "Task" [ 1121.877861] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.884775] env[61998]: DEBUG oslo_vmware.api [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389044, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.388477] env[61998]: DEBUG oslo_vmware.api [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Task: {'id': task-1389044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145155} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.388886] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1122.388962] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Deleted contents of the VM from datastore datastore2 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1122.389120] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1122.389307] env[61998]: INFO nova.compute.manager [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1122.389555] env[61998]: DEBUG oslo.service.loopingcall [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1122.389751] env[61998]: DEBUG nova.compute.manager [-] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1122.389846] env[61998]: DEBUG nova.network.neutron [-] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1122.633035] env[61998]: DEBUG nova.compute.manager [req-b5f23fa5-8ca0-417d-902f-74aaf6a18a99 req-76c2c6b7-5315-4490-acba-b8b95cac9a88 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Received event network-vif-deleted-723ec6af-ec74-4c82-ae7c-4795b74d6aad {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}} [ 1122.633307] env[61998]: INFO nova.compute.manager [req-b5f23fa5-8ca0-417d-902f-74aaf6a18a99 req-76c2c6b7-5315-4490-acba-b8b95cac9a88 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Neutron deleted interface 723ec6af-ec74-4c82-ae7c-4795b74d6aad; detaching it from the instance and deleting it from the info cache [ 1122.633430] env[61998]: DEBUG nova.network.neutron [req-b5f23fa5-8ca0-417d-902f-74aaf6a18a99 req-76c2c6b7-5315-4490-acba-b8b95cac9a88 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.113141] env[61998]: DEBUG nova.network.neutron [-] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.136218] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-67a007c2-9d6e-41e9-8114-2c8b7762794e {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.145239] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406e68ab-bd7f-4f43-9faf-2372f68dfd17 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.167928] env[61998]: DEBUG nova.compute.manager [req-b5f23fa5-8ca0-417d-902f-74aaf6a18a99 req-76c2c6b7-5315-4490-acba-b8b95cac9a88 service nova] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Detach interface failed, port_id=723ec6af-ec74-4c82-ae7c-4795b74d6aad, reason: Instance dde42f82-2616-43f0-a709-d6a63a63dd0d could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}} [ 1123.615417] env[61998]: INFO nova.compute.manager [-] [instance: dde42f82-2616-43f0-a709-d6a63a63dd0d] Took 1.23 seconds to deallocate network for instance. 
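As with the first instance, the network-vif-deleted event races against the teardown itself: by the time the handler tries to detach port 723ec6af-..., the VM has already been destroyed, so Nova logs "Detach interface failed ... could not be found" and treats the failure as benign rather than failing the event. The handler's shape, hedged; InstanceGone stands in for the driver's real not-found exception:

    class InstanceGone(Exception):
        """Placeholder for the driver's instance-not-found error."""

    def process_vif_deleted_event(instance_uuid, port_id, detach):
        try:
            detach(instance_uuid, port_id)
        except InstanceGone:
            # Benign race: the instance was torn down before the event landed.
            print("Detach interface failed, port_id=%s, reason: "
                  "Instance %s could not be found." % (port_id, instance_uuid))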
[ 1124.122759] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.123280] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.123667] env[61998]: DEBUG nova.objects.instance [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lazy-loading 'resources' on Instance uuid dde42f82-2616-43f0-a709-d6a63a63dd0d {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1124.805111] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222b5de3-6182-4b0e-92db-cd6df543b862 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.812997] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14a5f31-74e6-4030-92ec-9d25affe8a92 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.841805] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1943d36-d399-47c4-9b5e-f2d46240db66 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.848396] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d01489-0d24-4cee-8e8d-e6eeedd74890 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.860912] env[61998]: DEBUG nova.compute.provider_tree [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1125.364909] env[61998]: DEBUG nova.scheduler.client.report [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1125.869906] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 
tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.747s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1125.888961] env[61998]: INFO nova.scheduler.client.report [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Deleted allocations for instance dde42f82-2616-43f0-a709-d6a63a63dd0d
[ 1126.398035] env[61998]: DEBUG oslo_concurrency.lockutils [None req-b25d1173-7783-4b1e-8896-dd0ad89f03c5 tempest-AttachInterfacesTestJSON-2125592630 tempest-AttachInterfacesTestJSON-2125592630-project-member] Lock "dde42f82-2616-43f0-a709-d6a63a63dd0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.121s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1127.689281] env[61998]: DEBUG oslo_concurrency.lockutils [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "4c41a59a-59d4-4abd-b173-118e759fc19c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1127.689591] env[61998]: DEBUG oslo_concurrency.lockutils [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1127.689818] env[61998]: DEBUG oslo_concurrency.lockutils [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "4c41a59a-59d4-4abd-b173-118e759fc19c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1127.689999] env[61998]: DEBUG oslo_concurrency.lockutils [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1127.690577] env[61998]: DEBUG oslo_concurrency.lockutils [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1127.692881] env[61998]: INFO nova.compute.manager [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Terminating instance
[ 1127.694949] env[61998]: DEBUG nova.compute.manager [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Start destroying the instance on the hypervisor. {{(pid=61998) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3159}}
[ 1127.694949] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Destroying instance {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1127.695656] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3787e38f-41bc-4f48-856b-068becfd1924 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1127.704188] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Powering off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1127.704386] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8e0db54-8449-48bb-8f68-2db1bd195a50 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1127.713666] env[61998]: DEBUG oslo_vmware.api [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){
[ 1127.713666] env[61998]: value = "task-1389045"
[ 1127.713666] env[61998]: _type = "Task"
[ 1127.713666] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1127.721781] env[61998]: DEBUG oslo_vmware.api [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389045, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1128.225716] env[61998]: DEBUG oslo_vmware.api [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389045, 'name': PowerOffVM_Task, 'duration_secs': 0.209212} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1128.225716] env[61998]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Powered off the VM {{(pid=61998) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1128.225716] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Unregistering the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1128.225977] env[61998]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e40ae2da-f693-425a-a87e-2efde09e43a5 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1128.358076] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Unregistered the VM {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1128.358076] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Deleting contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1128.358076] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleting the datastore file [datastore1] 4c41a59a-59d4-4abd-b173-118e759fc19c {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1128.358076] env[61998]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0435fcc-0beb-4634-befc-5f30b3f6ce7b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1128.364640] env[61998]: DEBUG oslo_vmware.api [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for the task: (returnval){
[ 1128.364640] env[61998]: value = "task-1389047"
[ 1128.364640] env[61998]: _type = "Task"
[ 1128.364640] env[61998]: } to complete. {{(pid=61998) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1128.373609] env[61998]: DEBUG oslo_vmware.api [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1128.876613] env[61998]: DEBUG oslo_vmware.api [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Task: {'id': task-1389047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126249} completed successfully. {{(pid=61998) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1128.878044] env[61998]: DEBUG nova.virt.vmwareapi.ds_util [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleted the datastore file {{(pid=61998) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1128.878044] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Deleted contents of the VM from datastore datastore1 {{(pid=61998) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1128.878044] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Instance destroyed {{(pid=61998) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1128.878044] env[61998]: INFO nova.compute.manager [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Took 1.18 seconds to destroy the instance on the hypervisor.
[ 1128.878270] env[61998]: DEBUG oslo.service.loopingcall [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61998) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1128.878406] env[61998]: DEBUG nova.compute.manager [-] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Deallocating network for instance {{(pid=61998) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}}
[ 1128.878502] env[61998]: DEBUG nova.network.neutron [-] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] deallocate_for_instance() {{(pid=61998) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1129.295928] env[61998]: DEBUG nova.compute.manager [req-3a0ad4fc-21e5-4035-a012-2123e9506c7a req-f43a5218-3e68-4196-8452-e783b10ea22a service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Received event network-vif-deleted-33446d72-d352-428e-8a03-c36aaa61c776 {{(pid=61998) external_instance_event /opt/stack/nova/nova/compute/manager.py:11430}}
[ 1129.296103] env[61998]: INFO nova.compute.manager [req-3a0ad4fc-21e5-4035-a012-2123e9506c7a req-f43a5218-3e68-4196-8452-e783b10ea22a service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Neutron deleted interface 33446d72-d352-428e-8a03-c36aaa61c776; detaching it from the instance and deleting it from the info cache
[ 1129.296303] env[61998]: DEBUG nova.network.neutron [req-3a0ad4fc-21e5-4035-a012-2123e9506c7a req-f43a5218-3e68-4196-8452-e783b10ea22a service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1129.768948] env[61998]: DEBUG nova.network.neutron [-] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Updating instance_info_cache with network_info: [] {{(pid=61998) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1129.798503] env[61998]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8da38e42-e701-427c-ae9e-3636ef72e002 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1129.809035] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b360fc8-6d20-487b-8821-6211ac334c9a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1129.834156] env[61998]: DEBUG nova.compute.manager [req-3a0ad4fc-21e5-4035-a012-2123e9506c7a req-f43a5218-3e68-4196-8452-e783b10ea22a service nova] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Detach interface failed, port_id=33446d72-d352-428e-8a03-c36aaa61c776, reason: Instance 4c41a59a-59d4-4abd-b173-118e759fc19c could not be found. {{(pid=61998) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11264}}
[ 1130.272127] env[61998]: INFO nova.compute.manager [-] [instance: 4c41a59a-59d4-4abd-b173-118e759fc19c] Took 1.39 seconds to deallocate network for instance.
[ 1130.778570] env[61998]: DEBUG oslo_concurrency.lockutils [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1130.778895] env[61998]: DEBUG oslo_concurrency.lockutils [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1130.779181] env[61998]: DEBUG nova.objects.instance [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lazy-loading 'resources' on Instance uuid 4c41a59a-59d4-4abd-b173-118e759fc19c {{(pid=61998) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1131.312972] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db2a71c-3581-4d0e-ad41-dbf3c947bfa8 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1131.320219] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5096d7-e91c-457d-a739-1e25502a01e3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1131.349107] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a6e9c6-f07e-4e71-beb0-eee4d2ec524a {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1131.356316] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297a19ff-11a6-4f41-8420-93fcdcf92d97 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1131.369633] env[61998]: DEBUG nova.compute.provider_tree [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1131.872487] env[61998]: DEBUG nova.scheduler.client.report [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 1132.377127] env[61998]: DEBUG oslo_concurrency.lockutils [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1132.394665] env[61998]: INFO nova.scheduler.client.report [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Deleted allocations for instance 4c41a59a-59d4-4abd-b173-118e759fc19c
[ 1132.903264] env[61998]: DEBUG oslo_concurrency.lockutils [None req-edc8429a-cb37-45be-84ae-e94b31768057 tempest-AttachVolumeShelveTestJSON-915141329 tempest-AttachVolumeShelveTestJSON-915141329-project-member] Lock "4c41a59a-59d4-4abd-b173-118e759fc19c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.214s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1133.610216] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1133.610642] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Starting heal instance info cache {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10227}}
[ 1133.610642] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Rebuilding the list of instances to heal {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10231}}
[ 1134.113647] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Didn't find any instances for network info cache update. {{(pid=61998) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}}
[ 1134.113777] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1134.114323] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1134.114323] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1134.114323] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1134.619681] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Getting list of instances from cluster (obj){
[ 1134.619681] env[61998]: value = "domain-c8"
[ 1134.619681] env[61998]: _type = "ClusterComputeResource"
[ 1134.619681] env[61998]: } {{(pid=61998) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}}
[ 1134.620795] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c630d9-1809-4112-803c-9411376992e3 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1134.629585] env[61998]: DEBUG nova.virt.vmwareapi.vmops [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Got total of 0 instances {{(pid=61998) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}}
[ 1134.629823] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1134.629965] env[61998]: DEBUG nova.compute.manager [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61998) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10846}}
[ 1134.630136] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1135.133723] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1135.133965] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1135.134159] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1135.134316] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61998) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1135.135602] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0d76de-5e67-44bc-8df5-6c3234d2103b {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1135.143777] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f301b4-7f74-484d-a0cb-2516411b80cd {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1135.157915] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741c30aa-f415-48e1-ac82-ae9612927c78 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1135.165409] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda0c896-68d9-497c-9205-f0a6d5b2156f {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1135.194804] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180965MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=61998) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1135.194990] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1135.195212] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1136.219425] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1136.219731] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61998) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1136.234667] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f418b5-d9e4-4624-ab17-7c630fdcf703 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1136.242236] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4133bd-3176-40d5-9307-bdc6f4222397 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1136.272085] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6ce166-b468-48b7-b8d8-c770a291f1ff {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1136.279742] env[61998]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a3a03c-5751-46ba-a17b-afbb76e33ae1 {{(pid=61998) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1136.292432] env[61998]: DEBUG nova.compute.provider_tree [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed in ProviderTree for provider: c8c34fc8-902a-460e-a93a-a1e887f55ddd {{(pid=61998) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1136.795326] env[61998]: DEBUG nova.scheduler.client.report [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Inventory has not changed for provider c8c34fc8-902a-460e-a93a-a1e887f55ddd based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61998) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 1137.300350] env[61998]: DEBUG nova.compute.resource_tracker [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61998) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1137.300583] env[61998]: DEBUG oslo_concurrency.lockutils [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.105s {{(pid=61998) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1137.428272] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1137.428659] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1137.743763] env[61998]: DEBUG oslo_service.periodic_task [None req-12ad4782-81a7-4677-9306-37d058ade6f6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61998) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}